EqualityVerifier: verifies whether objects/variables are equal to an expected value. Class: org.apache.hadoop.hive.accumulo.TestAccumuloHiveRow — InternalCallVerifier, EqualityVerifier
@Test public void testWritableEmptyRow() throws IOException {
  // Round-trip an empty AccumuloHiveRow through Writable serialization and
  // verify that the deserialized copy equals the original.
  AccumuloHiveRow original = new AccumuloHiveRow();
  ByteArrayOutputStream byteSink = new ByteArrayOutputStream();
  DataOutputStream dataOut = new DataOutputStream(byteSink);
  original.write(dataOut);
  dataOut.close();
  // Read the serialized bytes back into a fresh row.
  DataInputStream dataIn = new DataInputStream(new ByteArrayInputStream(byteSink.toByteArray()));
  AccumuloHiveRow deserialized = new AccumuloHiveRow();
  deserialized.readFields(dataIn);
  assertEquals(original, deserialized);
}
InternalCallVerifier EqualityVerifier
@Test public void testWritableWithColumns() throws IOException {
  // Round-trip a populated row (two qualifiers in one family) through
  // Writable serialization and verify equality with the original.
  AccumuloHiveRow original = new AccumuloHiveRow("row");
  original.add("cf", "cq1", "1".getBytes());
  original.add("cf", "cq2", "2".getBytes());
  ByteArrayOutputStream byteSink = new ByteArrayOutputStream();
  DataOutputStream dataOut = new DataOutputStream(byteSink);
  original.write(dataOut);
  dataOut.close();
  // Deserialize into a fresh row from the captured bytes.
  DataInputStream dataIn = new DataInputStream(new ByteArrayInputStream(byteSink.toByteArray()));
  AccumuloHiveRow deserialized = new AccumuloHiveRow();
  deserialized.readFields(dataIn);
  assertEquals(original, deserialized);
}
IterativeVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
@Test public void testGetValueFromColumn() {
  AccumuloHiveRow row = new AccumuloHiveRow("row1");
  // An empty row yields null for any lookup.
  assertNull(row.getValue(new Text(""), new Text("")));
  final int numColumns = 5;
  for (int i = 1; i <= numColumns; i++) {
    row.add("cf", "cq" + i, Integer.toString(i).getBytes());
  }
  // A qualifier that was never added still yields null.
  assertNull(row.getValue(new Text("cf"), new Text("cq0")));
  // Every stored qualifier resolves to its stored value.
  for (int i = numColumns; i >= 1; i--) {
    byte[] expected = Integer.toString(i).getBytes();
    assertArrayEquals(expected, row.getValue(new Text("cf"), new Text("cq" + i)));
  }
}
Class: org.apache.hadoop.hive.accumulo.TestAccumuloStorageHandler BooleanVerifier EqualityVerifier HybridVerifier
@Test public void testTablePropertiesPassedToOutputJobProperties() {
  // Verifies that table properties on the TableDesc are copied verbatim into
  // the output job properties by the storage handler.
  TableDesc tableDesc = Mockito.mock(TableDesc.class);
  Properties props = new Properties();
  // Typed map restored: configureOutputJobProperties takes Map<String, String>.
  Map<String, String> jobProperties = new HashMap<>();
  props.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, "cf:cq1,cf:cq2,cf:cq3");
  props.setProperty(AccumuloSerDeParameters.TABLE_NAME, "table");
  props.setProperty(AccumuloSerDeParameters.VISIBILITY_LABEL_KEY, "foo");
  Mockito.when(tableDesc.getProperties()).thenReturn(props);
  storageHandler.configureOutputJobProperties(tableDesc, jobProperties);
  // Exactly the three configured properties must be propagated.
  Assert.assertEquals(3, jobProperties.size());
  Assert.assertTrue("Job properties did not contain column mappings",
      jobProperties.containsKey(AccumuloSerDeParameters.COLUMN_MAPPINGS));
  Assert.assertEquals(props.getProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS),
      jobProperties.get(AccumuloSerDeParameters.COLUMN_MAPPINGS));
  Assert.assertTrue("Job properties did not contain accumulo table name",
      jobProperties.containsKey(AccumuloSerDeParameters.TABLE_NAME));
  Assert.assertEquals(props.getProperty(AccumuloSerDeParameters.TABLE_NAME),
      jobProperties.get(AccumuloSerDeParameters.TABLE_NAME));
  Assert.assertTrue("Job properties did not contain visibility label",
      jobProperties.containsKey(AccumuloSerDeParameters.VISIBILITY_LABEL_KEY));
  Assert.assertEquals(props.getProperty(AccumuloSerDeParameters.VISIBILITY_LABEL_KEY),
      jobProperties.get(AccumuloSerDeParameters.VISIBILITY_LABEL_KEY));
}
BooleanVerifier EqualityVerifier HybridVerifier
@Test public void testTablePropertiesPassedToInputJobProperties() {
  // Verifies that table properties on the TableDesc are copied verbatim into
  // the input job properties by the storage handler.
  TableDesc tableDesc = Mockito.mock(TableDesc.class);
  Properties props = new Properties();
  // Typed map restored: configureInputJobProperties takes Map<String, String>.
  Map<String, String> jobProperties = new HashMap<>();
  props.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, "cf:cq1,cf:cq2,cf:cq3");
  props.setProperty(AccumuloSerDeParameters.TABLE_NAME, "table");
  props.setProperty(AccumuloSerDeParameters.ITERATOR_PUSHDOWN_KEY, "true");
  props.setProperty(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE, ColumnEncoding.BINARY.getName());
  props.setProperty(AccumuloSerDeParameters.AUTHORIZATIONS_KEY, "foo,bar");
  Mockito.when(tableDesc.getProperties()).thenReturn(props);
  storageHandler.configureInputJobProperties(tableDesc, jobProperties);
  // Exactly the five configured properties must be propagated.
  Assert.assertEquals(5, jobProperties.size());
  Assert.assertTrue(jobProperties.containsKey(AccumuloSerDeParameters.COLUMN_MAPPINGS));
  Assert.assertEquals(props.getProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS),
      jobProperties.get(AccumuloSerDeParameters.COLUMN_MAPPINGS));
  Assert.assertTrue(jobProperties.containsKey(AccumuloSerDeParameters.TABLE_NAME));
  Assert.assertEquals(props.getProperty(AccumuloSerDeParameters.TABLE_NAME),
      jobProperties.get(AccumuloSerDeParameters.TABLE_NAME));
  Assert.assertTrue(jobProperties.containsKey(AccumuloSerDeParameters.ITERATOR_PUSHDOWN_KEY));
  Assert.assertEquals(props.getProperty(AccumuloSerDeParameters.ITERATOR_PUSHDOWN_KEY),
      jobProperties.get(AccumuloSerDeParameters.ITERATOR_PUSHDOWN_KEY));
  Assert.assertTrue(jobProperties.containsKey(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE));
  Assert.assertEquals(props.getProperty(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE),
      jobProperties.get(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE));
  Assert.assertTrue(jobProperties.containsKey(AccumuloSerDeParameters.AUTHORIZATIONS_KEY));
  Assert.assertEquals(props.getProperty(AccumuloSerDeParameters.AUTHORIZATIONS_KEY),
      jobProperties.get(AccumuloSerDeParameters.AUTHORIZATIONS_KEY));
}
Class: org.apache.hadoop.hive.accumulo.TestHiveAccumuloHelper InternalCallVerifier EqualityVerifier
@SuppressWarnings("unchecked") // Mockito.mock(Token.class) returns a raw Token
@Test public void testTokenToConfFromUser() throws Exception {
  // A mocked Accumulo delegation token held by the UGI must end up in the
  // JobConf credentials after addTokenFromUserToJobConf.
  // Generic types restored: getTokens()/getAllTokens() deal in
  // Collection<Token<? extends TokenIdentifier>> (they were stripped to
  // invalid "ArrayList>"/"Token>"/"Collection>" at some point).
  UserGroupInformation ugi = Mockito.mock(UserGroupInformation.class);
  JobConf jobConf = new JobConf();
  Collection<Token<? extends TokenIdentifier>> tokens = new ArrayList<>();
  Text service = new Text("service");
  Token<? extends TokenIdentifier> token = Mockito.mock(Token.class);
  tokens.add(token);
  Mockito.when(ugi.getTokens()).thenReturn(tokens);
  Mockito.when(token.getKind()).thenReturn(HiveAccumuloHelper.ACCUMULO_SERVICE);
  Mockito.when(token.getService()).thenReturn(service);
  try {
    helper.addTokenFromUserToJobConf(ugi, jobConf);
  } catch (IOException e) {
    // Hadoop 1 lacks the token APIs; the test is a no-op there.
    log.info("Ignoring exception, likely coming from Hadoop 1", e);
    return;
  }
  Collection<Token<? extends TokenIdentifier>> credTokens = jobConf.getCredentials().getAllTokens();
  assertEquals(1, credTokens.size());
  assertEquals(service, credTokens.iterator().next().getService());
}
InternalCallVerifier EqualityVerifier
@SuppressWarnings("unchecked") // Mockito.mock(Token.class) returns a raw Token
@Test public void testTokenMerge() throws Exception {
  // A mocked token merged into the JobConf must be retrievable from its
  // credentials under the same service name.
  // Generic types restored: the stripped "Token>"/"Collection>" were invalid.
  final Text service = new Text("service");
  Token<? extends TokenIdentifier> token = Mockito.mock(Token.class);
  JobConf jobConf = new JobConf();
  Mockito.when(token.getService()).thenReturn(service);
  try {
    helper.mergeTokenIntoJobConf(jobConf, token);
  } catch (IOException e) {
    // Hadoop 1 lacks the token APIs; the test is a no-op there.
    log.info("Ignoring exception, likely coming from Hadoop 1", e);
    return;
  }
  Collection<Token<? extends TokenIdentifier>> tokens = jobConf.getCredentials().getAllTokens();
  assertEquals(1, tokens.size());
  assertEquals(service, tokens.iterator().next().getService());
}
Class: org.apache.hadoop.hive.accumulo.TestLazyAccumuloMap APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
@Test public void testBinaryIntMap() throws SerDeException, IOException {
  // Three binary-encoded int->int entries (key i maps to 2*i) in family cf1.
  AccumuloHiveRow row = new AccumuloHiveRow("row");
  row.add(new Text("cf1"), new Text(toBytes(1)), toBytes(2));
  row.add(new Text("cf1"), new Text(toBytes(2)), toBytes(4));
  row.add(new Text("cf1"), new Text(toBytes(3)), toBytes(6));
  HiveAccumuloMapColumnMapping mapping = new HiveAccumuloMapColumnMapping("cf1", null,
      ColumnEncoding.BINARY, ColumnEncoding.BINARY, "column",
      TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo).toString());
  Text nullSequence = new Text("\\N");
  // "map<int,int>" restored: the type-string parser requires the key/value
  // type parameters, which were stripped from this file at some point.
  ObjectInspector oi = LazyFactory.createLazyObjectInspector(
      TypeInfoUtils.getTypeInfosFromTypeString("map<int,int>").get(0),
      new byte[] {(byte) 1, (byte) 2}, 0, nullSequence, false, (byte) 0);
  LazyAccumuloMap map = new LazyAccumuloMap((LazyMapObjectInspector) oi);
  map.init(row, mapping);
  Assert.assertEquals(3, map.getMapSize());
  // Every key must resolve to twice its value.
  for (int key = 1; key <= 3; key++) {
    Object o = map.getMapValueElement(new IntWritable(key));
    Assert.assertNotNull(o);
    Assert.assertEquals(new IntWritable(2 * key), ((LazyInteger) o).getWritableObject());
  }
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
@Test public void testStringMapWithProjection() throws SerDeException {
  // Only the cf1 entries should be projected into the map; cf2/cf3 are ignored.
  AccumuloHiveRow row = new AccumuloHiveRow("row");
  row.add("cf1", "foo", "bar".getBytes());
  row.add("cf1", "bar", "foo".getBytes());
  row.add("cf2", "foo1", "bar1".getBytes());
  row.add("cf3", "bar1", "foo1".getBytes());
  HiveAccumuloMapColumnMapping mapping = new HiveAccumuloMapColumnMapping("cf1", null,
      ColumnEncoding.STRING, ColumnEncoding.STRING, "column",
      TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo).toString());
  Text nullSequence = new Text("\\N");
  // "map<string,string>" restored: the type-string parser requires the
  // key/value type parameters, which were stripped from this file.
  ObjectInspector oi = LazyFactory.createLazyObjectInspector(
      TypeInfoUtils.getTypeInfosFromTypeString("map<string,string>").get(0),
      new byte[] {(byte) 1, (byte) 2}, 0, nullSequence, false, (byte) 0);
  LazyAccumuloMap map = new LazyAccumuloMap((LazyMapObjectInspector) oi);
  map.init(row, mapping);
  Assert.assertEquals(2, map.getMapSize());
  Object o = map.getMapValueElement(new Text("foo"));
  Assert.assertNotNull(o);
  Assert.assertEquals(new Text("bar"), ((LazyString) o).getWritableObject());
  o = map.getMapValueElement(new Text("bar"));
  Assert.assertNotNull(o);
  Assert.assertEquals(new Text("foo"), ((LazyString) o).getWritableObject());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
@Test public void testMixedSerializationMap() throws SerDeException, IOException {
  // Binary-encoded int keys paired with string-encoded int values (key i -> 2*i).
  AccumuloHiveRow row = new AccumuloHiveRow("row");
  row.add(new Text("cf1"), new Text(toBytes(1)), "2".getBytes());
  row.add(new Text("cf1"), new Text(toBytes(2)), "4".getBytes());
  row.add(new Text("cf1"), new Text(toBytes(3)), "6".getBytes());
  HiveAccumuloMapColumnMapping mapping = new HiveAccumuloMapColumnMapping("cf1", null,
      ColumnEncoding.BINARY, ColumnEncoding.STRING, "column",
      TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo).toString());
  Text nullSequence = new Text("\\N");
  // "map<int,int>" restored: the type-string parser requires the key/value
  // type parameters, which were stripped from this file at some point.
  ObjectInspector oi = LazyFactory.createLazyObjectInspector(
      TypeInfoUtils.getTypeInfosFromTypeString("map<int,int>").get(0),
      new byte[] {(byte) 1, (byte) 2}, 0, nullSequence, false, (byte) 0);
  LazyAccumuloMap map = new LazyAccumuloMap((LazyMapObjectInspector) oi);
  map.init(row, mapping);
  Assert.assertEquals(3, map.getMapSize());
  // Every key must resolve to twice its value.
  for (int key = 1; key <= 3; key++) {
    Object o = map.getMapValueElement(new IntWritable(key));
    Assert.assertNotNull(o);
    Assert.assertEquals(new IntWritable(2 * key), ((LazyInteger) o).getWritableObject());
  }
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
@Test public void testIntMap() throws SerDeException, IOException {
  // String-encoded int keys and values (key i -> 2*i) in family cf1.
  AccumuloHiveRow row = new AccumuloHiveRow("row");
  row.add(new Text("cf1"), new Text("1"), "2".getBytes());
  row.add(new Text("cf1"), new Text("2"), "4".getBytes());
  row.add(new Text("cf1"), new Text("3"), "6".getBytes());
  HiveAccumuloMapColumnMapping mapping = new HiveAccumuloMapColumnMapping("cf1", null,
      ColumnEncoding.STRING, ColumnEncoding.STRING, "column",
      TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo).toString());
  Text nullSequence = new Text("\\N");
  // "map<int,int>" restored: the type-string parser requires the key/value
  // type parameters, which were stripped from this file at some point.
  ObjectInspector oi = LazyFactory.createLazyObjectInspector(
      TypeInfoUtils.getTypeInfosFromTypeString("map<int,int>").get(0),
      new byte[] {(byte) 1, (byte) 2}, 0, nullSequence, false, (byte) 0);
  LazyAccumuloMap map = new LazyAccumuloMap((LazyMapObjectInspector) oi);
  map.init(row, mapping);
  Assert.assertEquals(3, map.getMapSize());
  // Every key must resolve to twice its value.
  for (int key = 1; key <= 3; key++) {
    Object o = map.getMapValueElement(new IntWritable(key));
    Assert.assertNotNull(o);
    Assert.assertEquals(new IntWritable(2 * key), ((LazyInteger) o).getWritableObject());
  }
}
Class: org.apache.hadoop.hive.accumulo.TestLazyAccumuloRow APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
@Test public void testDeserializationOfBinaryEncoding() throws Exception {
  // With DEFAULT_STORAGE_TYPE=binary, string columns marked "#s" deserialize
  // as LazyString while the unmarked int columns deserialize as LazyDioInteger
  // from raw 4-byte big-endian values.
  // Generic list types restored (they were stripped to raw "List").
  List<String> columns = Arrays.asList("row", "given_name", "surname", "age", "weight", "height");
  List<TypeInfo> types = Arrays.asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo,
      TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo,
      TypeInfoFactory.intTypeInfo);
  LazySimpleStructObjectInspector objectInspector = (LazySimpleStructObjectInspector) LazyFactory
      .createLazyStructInspector(columns, types, LazySerDeParameters.DefaultSeparators,
          new Text("\\N"), false, false, (byte) '\\');
  DefaultAccumuloRowIdFactory rowIdFactory = new DefaultAccumuloRowIdFactory();
  Properties props = new Properties();
  props.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,
      ":rowid#s,personal:given_name#s,personal:surname#s,personal:age,personal:weight,personal:height");
  props.setProperty(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columns));
  props.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(types));
  props.setProperty(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE, ColumnEncoding.BINARY.getName());
  AccumuloSerDeParameters params =
      new AccumuloSerDeParameters(new Configuration(), props, AccumuloSerDe.class.getName());
  rowIdFactory.init(params, props);
  // Write each int through DataOutputStream to get its binary representation,
  // resetting the buffer between values.
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataOutputStream out = new DataOutputStream(baos);
  LazyAccumuloRow lazyRow = new LazyAccumuloRow(objectInspector);
  AccumuloHiveRow hiveRow = new AccumuloHiveRow("1");
  hiveRow.add("personal", "given_name", "Bob".getBytes());
  hiveRow.add("personal", "surname", "Stevens".getBytes());
  out.writeInt(30);
  hiveRow.add("personal", "age", baos.toByteArray());
  baos.reset();
  out.writeInt(200);
  hiveRow.add("personal", "weight", baos.toByteArray());
  baos.reset();
  out.writeInt(72);
  hiveRow.add("personal", "height", baos.toByteArray());
  ColumnMapper columnMapper = params.getColumnMapper();
  lazyRow.init(hiveRow, columnMapper.getColumnMappings(), rowIdFactory);
  // String-encoded fields (rowid and the two names).
  Object o = lazyRow.getField(0);
  Assert.assertNotNull(o);
  Assert.assertEquals(LazyString.class, o.getClass());
  Assert.assertEquals("1", ((LazyString) o).toString());
  o = lazyRow.getField(1);
  Assert.assertNotNull(o);
  Assert.assertEquals(LazyString.class, o.getClass());
  Assert.assertEquals("Bob", ((LazyString) o).toString());
  o = lazyRow.getField(2);
  Assert.assertNotNull(o);
  Assert.assertEquals(LazyString.class, o.getClass());
  Assert.assertEquals("Stevens", ((LazyString) o).toString());
  // Binary-encoded int fields come back as LazyDioInteger.
  o = lazyRow.getField(3);
  Assert.assertNotNull(o);
  Assert.assertEquals(LazyDioInteger.class, o.getClass());
  Assert.assertEquals("30", ((LazyDioInteger) o).toString());
  o = lazyRow.getField(4);
  Assert.assertNotNull(o);
  Assert.assertEquals(LazyDioInteger.class, o.getClass());
  Assert.assertEquals("200", ((LazyDioInteger) o).toString());
  o = lazyRow.getField(5);
  Assert.assertNotNull(o);
  Assert.assertEquals(LazyDioInteger.class, o.getClass());
  Assert.assertEquals("72", ((LazyDioInteger) o).toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void testExpectedDeserializationOfColumns() throws Exception {
  // With the default (string) storage type, int columns deserialize as plain
  // LazyInteger and string columns as LazyString.
  // Generic list types restored (they were stripped to raw "List").
  List<String> columns = Arrays.asList("row", "given_name", "surname", "age", "weight", "height");
  List<TypeInfo> types = Arrays.asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo,
      TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo,
      TypeInfoFactory.intTypeInfo);
  LazySimpleStructObjectInspector objectInspector = (LazySimpleStructObjectInspector) LazyFactory
      .createLazyStructInspector(columns, types, LazySerDeParameters.DefaultSeparators,
          new Text("\\N"), false, false, (byte) '\\');
  DefaultAccumuloRowIdFactory rowIdFactory = new DefaultAccumuloRowIdFactory();
  Properties props = new Properties();
  props.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,
      ":rowid,personal:given_name,personal:surname,personal:age,personal:weight,personal:height");
  props.setProperty(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columns));
  props.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(types));
  AccumuloSerDeParameters params =
      new AccumuloSerDeParameters(new Configuration(), props, AccumuloSerDe.class.getName());
  rowIdFactory.init(params, props);
  LazyAccumuloRow lazyRow = new LazyAccumuloRow(objectInspector);
  AccumuloHiveRow hiveRow = new AccumuloHiveRow("1");
  hiveRow.add("personal", "given_name", "Bob".getBytes());
  hiveRow.add("personal", "surname", "Stevens".getBytes());
  hiveRow.add("personal", "age", "30".getBytes());
  hiveRow.add("personal", "weight", "200".getBytes());
  hiveRow.add("personal", "height", "72".getBytes());
  ColumnMapper columnMapper = params.getColumnMapper();
  lazyRow.init(hiveRow, columnMapper.getColumnMappings(), rowIdFactory);
  Object o = lazyRow.getField(0);
  Assert.assertEquals(LazyString.class, o.getClass());
  Assert.assertEquals("1", ((LazyString) o).toString());
  o = lazyRow.getField(1);
  Assert.assertEquals(LazyString.class, o.getClass());
  Assert.assertEquals("Bob", ((LazyString) o).toString());
  o = lazyRow.getField(2);
  Assert.assertEquals(LazyString.class, o.getClass());
  Assert.assertEquals("Stevens", ((LazyString) o).toString());
  o = lazyRow.getField(3);
  Assert.assertEquals(LazyInteger.class, o.getClass());
  Assert.assertEquals("30", ((LazyInteger) o).toString());
  o = lazyRow.getField(4);
  Assert.assertEquals(LazyInteger.class, o.getClass());
  Assert.assertEquals("200", ((LazyInteger) o).toString());
  o = lazyRow.getField(5);
  Assert.assertEquals(LazyInteger.class, o.getClass());
  Assert.assertEquals("72", ((LazyInteger) o).toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void testNullInit() throws SerDeException {
  // A column with no corresponding Accumulo entry (cq2) must surface as null
  // in the deserialized struct.
  // Generic list types restored (they were stripped to raw "List").
  List<String> columns = Arrays.asList("row", "1", "2", "3");
  List<TypeInfo> types = Arrays.asList(
      TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME),
      TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME),
      TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME),
      TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME));
  LazySimpleStructObjectInspector objectInspector = (LazySimpleStructObjectInspector) LazyFactory
      .createLazyStructInspector(columns, types, LazySerDeParameters.DefaultSeparators,
          new Text("\\N"), false, false, (byte) '\\');
  DefaultAccumuloRowIdFactory rowIdFactory = new DefaultAccumuloRowIdFactory();
  Properties props = new Properties();
  props.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowid,cf:cq1,cf:cq2,cf:cq3");
  props.setProperty(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columns));
  props.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(types));
  AccumuloSerDeParameters params =
      new AccumuloSerDeParameters(new Configuration(), props, AccumuloSerDe.class.getName());
  rowIdFactory.init(params, props);
  ColumnMapper columnMapper = params.getColumnMapper();
  LazyAccumuloRow lazyRow = new LazyAccumuloRow(objectInspector);
  AccumuloHiveRow hiveRow = new AccumuloHiveRow("1");
  hiveRow.add("cf", "cq1", "foo".getBytes());
  hiveRow.add("cf", "cq3", "bar".getBytes());
  lazyRow.init(hiveRow, columnMapper.getColumnMappings(), rowIdFactory);
  String expected = "{'row':'1','1':'foo','2':null,'3':'bar'}".replace('\'', '"');
  Assert.assertEquals(expected, SerDeUtils.getJSONString(lazyRow, objectInspector));
  // Asserted twice deliberately, presumably to verify that re-reading the lazy
  // row is stable -- TODO confirm the original intent.
  Assert.assertEquals(expected, SerDeUtils.getJSONString(lazyRow, objectInspector));
}
Class: org.apache.hadoop.hive.accumulo.columns.TestColumnEncoding EqualityVerifier
@Test public void testBinaryEncoding() {
  // The single-character code "b" must resolve to the BINARY encoding.
  ColumnEncoding resolved = ColumnEncoding.fromCode("b");
  Assert.assertEquals(ColumnEncoding.BINARY, resolved);
}
EqualityVerifier
@Test public void testMapEncodingParsing() {
  // Every short/long spelling combination of "key:value" encodings must parse
  // to the corresponding (keyEncoding, valueEncoding) pair.
  // Generic Entry types restored (they were stripped to a raw "Entry").
  Entry<ColumnEncoding, ColumnEncoding> stringString =
      Maps.immutableEntry(ColumnEncoding.STRING, ColumnEncoding.STRING);
  Entry<ColumnEncoding, ColumnEncoding> stringBinary =
      Maps.immutableEntry(ColumnEncoding.STRING, ColumnEncoding.BINARY);
  Entry<ColumnEncoding, ColumnEncoding> binaryBinary =
      Maps.immutableEntry(ColumnEncoding.BINARY, ColumnEncoding.BINARY);
  Entry<ColumnEncoding, ColumnEncoding> binaryString =
      Maps.immutableEntry(ColumnEncoding.BINARY, ColumnEncoding.STRING);
  Assert.assertEquals(stringString, ColumnEncoding.getMapEncoding("s:s"));
  Assert.assertEquals(stringString, ColumnEncoding.getMapEncoding("s:string"));
  Assert.assertEquals(stringString, ColumnEncoding.getMapEncoding("string:s"));
  Assert.assertEquals(stringString, ColumnEncoding.getMapEncoding("string:string"));
  Assert.assertEquals(stringBinary, ColumnEncoding.getMapEncoding("s:b"));
  Assert.assertEquals(stringBinary, ColumnEncoding.getMapEncoding("string:b"));
  Assert.assertEquals(stringBinary, ColumnEncoding.getMapEncoding("s:binary"));
  Assert.assertEquals(stringBinary, ColumnEncoding.getMapEncoding("string:binary"));
  Assert.assertEquals(binaryString, ColumnEncoding.getMapEncoding("b:s"));
  Assert.assertEquals(binaryString, ColumnEncoding.getMapEncoding("b:string"));
  Assert.assertEquals(binaryString, ColumnEncoding.getMapEncoding("binary:s"));
  Assert.assertEquals(binaryString, ColumnEncoding.getMapEncoding("binary:string"));
  Assert.assertEquals(binaryBinary, ColumnEncoding.getMapEncoding("b:b"));
  Assert.assertEquals(binaryBinary, ColumnEncoding.getMapEncoding("binary:b"));
  Assert.assertEquals(binaryBinary, ColumnEncoding.getMapEncoding("b:binary"));
  Assert.assertEquals(binaryBinary, ColumnEncoding.getMapEncoding("binary:binary"));
}
EqualityVerifier
@Test public void testParse() {
  // A "#s" suffix on a column mapping selects the STRING encoding.
  ColumnEncoding resolved = ColumnEncoding.getFromMapping("foo:bar#s");
  Assert.assertEquals(ColumnEncoding.STRING, resolved);
}
EqualityVerifier
@Test public void testStringEncoding() {
  // The single-character code "s" must resolve to the STRING encoding.
  ColumnEncoding resolved = ColumnEncoding.fromCode("s");
  Assert.assertEquals(ColumnEncoding.STRING, resolved);
}
EqualityVerifier
@Test public void testStripCodeWithEscapedPound() {
  // An escaped pound inside the mapping must survive stripping of the
  // trailing encoding code.
  String mapping = "foo:ba\\#r";
  String withCode = mapping + AccumuloHiveConstants.POUND + ColumnEncoding.BINARY.getCode();
  Assert.assertEquals(mapping, ColumnEncoding.stripCode(withCode));
}
EqualityVerifier
@Test public void testParseWithEscapedPound() {
  // The escaped pound in the family is not an encoding delimiter; the
  // trailing "#b" is, and selects BINARY.
  ColumnEncoding resolved = ColumnEncoding.getFromMapping("fo\\#o:bar#b");
  Assert.assertEquals(ColumnEncoding.BINARY, resolved);
}
EqualityVerifier
@Test public void testStripCode() {
  // Stripping the trailing "#<code>" must return the bare mapping.
  String mapping = "foo:bar";
  String withCode = mapping + AccumuloHiveConstants.POUND + ColumnEncoding.BINARY.getCode();
  Assert.assertEquals(mapping, ColumnEncoding.stripCode(withCode));
}
Class: org.apache.hadoop.hive.accumulo.columns.TestColumnMapper APIUtilityVerifier IterativeVerifier BranchVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test public void testNormalMapping() throws TooManyAccumuloColumnsException {
  // A rowid plus three cf:cq specs must yield one HiveAccumuloRowIdColumnMapping
  // followed by HiveAccumuloColumnMappings, addressable both by iterator and
  // by offset.
  // Generic types restored (they were stripped to raw "List"/"Iterator").
  List<String> rawMappings = Arrays.asList(AccumuloHiveConstants.ROWID, "cf:cq", "cf:_", "cf:qual");
  List<String> columnNames = Arrays.asList("row", "col1", "col2", "col3");
  List<TypeInfo> columnTypes = Arrays.asList(TypeInfoFactory.stringTypeInfo,
      TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo);
  ColumnMapper mapper = new ColumnMapper(Joiner.on(AccumuloHiveConstants.COMMA).join(rawMappings),
      ColumnEncoding.STRING.getName(), columnNames, columnTypes);
  List<ColumnMapping> mappings = mapper.getColumnMappings();
  Assert.assertEquals(rawMappings.size(), mappings.size());
  Assert.assertEquals(mappings.size(), mapper.size());
  Iterator<String> rawIter = rawMappings.iterator();
  Iterator<ColumnMapping> iter = mappings.iterator();
  for (int i = 0; i < mappings.size() && iter.hasNext(); i++) {
    String rawMapping = rawIter.next();
    ColumnMapping mapping = iter.next();
    // Offset-based access must agree with iteration order.
    ColumnMapping mappingByOffset = mapper.get(i);
    Assert.assertEquals(mapping, mappingByOffset);
    if (AccumuloHiveConstants.ROWID.equals(rawMapping)) {
      Assert.assertEquals(HiveAccumuloRowIdColumnMapping.class, mapping.getClass());
    } else {
      Assert.assertEquals(HiveAccumuloColumnMapping.class, mapping.getClass());
    }
  }
  // The rowid mapping is present and sits at offset 0.
  Assert.assertEquals(0, mapper.getRowIdOffset());
  Assert.assertTrue(mapper.hasRowIdMapping());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void testMap() throws TooManyAccumuloColumnsException {
  // Wildcard specs ("cf:*", "cf:prefix*") become map mappings; an escaped
  // asterisk ("bar\*") is a literal qualifier, not a prefix.
  // Generic types restored (they were stripped to raw "List").
  List<String> hiveColumns = Arrays.asList("rowid", "col1", "col2", "col3");
  List<TypeInfo> columnTypes = Arrays.asList(TypeInfoFactory.stringTypeInfo,
      TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo),
      TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo),
      TypeInfoFactory.stringTypeInfo);
  List<String> rawMappings =
      Arrays.asList(AccumuloHiveConstants.ROWID, "cf1:*", "cf2:2*", "cq3:bar\\*");
  ColumnMapper mapper = new ColumnMapper(Joiner.on(AccumuloHiveConstants.COMMA).join(rawMappings),
      ColumnEncoding.BINARY.getName(), hiveColumns, columnTypes);
  List<ColumnMapping> mappings = mapper.getColumnMappings();
  Assert.assertEquals(4, mappings.size());
  Assert.assertEquals(HiveAccumuloRowIdColumnMapping.class, mappings.get(0).getClass());
  Assert.assertEquals(HiveAccumuloMapColumnMapping.class, mappings.get(1).getClass());
  Assert.assertEquals(HiveAccumuloMapColumnMapping.class, mappings.get(2).getClass());
  Assert.assertEquals(HiveAccumuloColumnMapping.class, mappings.get(3).getClass());
  // Rowid mapping carries the default (BINARY) encoding and the hive metadata.
  HiveAccumuloRowIdColumnMapping row = (HiveAccumuloRowIdColumnMapping) mappings.get(0);
  Assert.assertEquals(ColumnEncoding.BINARY, row.getEncoding());
  Assert.assertEquals(hiveColumns.get(0), row.getColumnName());
  Assert.assertEquals(columnTypes.get(0).toString(), row.getColumnType());
  // "cf1:*" -> whole-family map, empty qualifier prefix.
  HiveAccumuloMapColumnMapping map = (HiveAccumuloMapColumnMapping) mappings.get(1);
  Assert.assertEquals("cf1", map.getColumnFamily());
  Assert.assertEquals("", map.getColumnQualifierPrefix());
  Assert.assertEquals(ColumnEncoding.BINARY, map.getEncoding());
  Assert.assertEquals(hiveColumns.get(1), map.getColumnName());
  Assert.assertEquals(columnTypes.get(1).toString(), map.getColumnType());
  // "cf2:2*" -> map over qualifiers starting with "2".
  map = (HiveAccumuloMapColumnMapping) mappings.get(2);
  Assert.assertEquals("cf2", map.getColumnFamily());
  Assert.assertEquals("2", map.getColumnQualifierPrefix());
  Assert.assertEquals(ColumnEncoding.BINARY, map.getEncoding());
  Assert.assertEquals(hiveColumns.get(2), map.getColumnName());
  Assert.assertEquals(columnTypes.get(2).toString(), map.getColumnType());
  // "cq3:bar\*" -> plain column whose qualifier is the literal "bar*".
  HiveAccumuloColumnMapping column = (HiveAccumuloColumnMapping) mappings.get(3);
  Assert.assertEquals("cq3", column.getColumnFamily());
  Assert.assertEquals("bar*", column.getColumnQualifier());
  Assert.assertEquals(ColumnEncoding.BINARY, column.getEncoding());
  Assert.assertEquals(hiveColumns.get(3), column.getColumnName());
  Assert.assertEquals(columnTypes.get(3).toString(), column.getColumnType());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void testGetTypesString() throws TooManyAccumuloColumnsException {
  // With all-string hive columns, the colon-joined types string must contain
  // one "string" entry per mapping.
  // Generic list types restored (they were stripped to raw "List").
  List<String> hiveColumns = Arrays.asList("rowid", "col1", "col2", "col3");
  List<String> rawMappings = Arrays.asList(AccumuloHiveConstants.ROWID, "cf:cq", "cf:_", "cf:qual");
  List<TypeInfo> columnTypes = Arrays.asList(TypeInfoFactory.stringTypeInfo,
      TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo);
  ColumnMapper mapper = new ColumnMapper(Joiner.on(AccumuloHiveConstants.COMMA).join(rawMappings),
      null, hiveColumns, columnTypes);
  String typeString = mapper.getTypesString();
  String[] types = StringUtils.split(typeString, AccumuloHiveConstants.COLON);
  Assert.assertEquals(rawMappings.size(), types.length);
  for (String type : types) {
    Assert.assertEquals(serdeConstants.STRING_TYPE_NAME, type);
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
@Test public void testGetMappingFromHiveColumn() throws TooManyAccumuloColumnsException {
  // Looking up a mapping by hive column name must return the mapping whose
  // spec sits at the same position as that column.
  // Generic list types restored (they were stripped to raw "List").
  List<String> hiveColumns = Arrays.asList("rowid", "col1", "col2", "col3");
  List<TypeInfo> columnTypes = Arrays.asList(TypeInfoFactory.stringTypeInfo,
      TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo);
  List<String> rawMappings = Arrays.asList(AccumuloHiveConstants.ROWID, "cf:cq", "cf:_", "cf:qual");
  ColumnMapper mapper = new ColumnMapper(Joiner.on(AccumuloHiveConstants.COMMA).join(rawMappings),
      null, hiveColumns, columnTypes);
  for (int i = 0; i < hiveColumns.size(); i++) {
    String hiveColumn = hiveColumns.get(i);
    String accumuloMapping = rawMappings.get(i);
    ColumnMapping mapping = mapper.getColumnMappingForHiveColumn(hiveColumns, hiveColumn);
    Assert.assertEquals(accumuloMapping, mapping.getMappingSpec());
  }
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void testDefaultBinary() throws TooManyAccumuloColumnsException {
  // Mappings without a "#s" suffix inherit the BINARY default; those with it
  // get STRING.
  // Generic list types restored (they were stripped to raw "List").
  List<String> hiveColumns = Arrays.asList("rowid", "col1", "col2", "col3", "col4");
  List<String> rawMappings =
      Arrays.asList(AccumuloHiveConstants.ROWID, "cf:cq", "cf:_#s", "cf:qual#s", "cf:qual2");
  List<TypeInfo> columnTypes = Arrays.asList(TypeInfoFactory.stringTypeInfo,
      TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo,
      TypeInfoFactory.stringTypeInfo);
  ColumnMapper mapper = new ColumnMapper(Joiner.on(AccumuloHiveConstants.COMMA).join(rawMappings),
      ColumnEncoding.BINARY.getName(), hiveColumns, columnTypes);
  List<ColumnMapping> mappings = mapper.getColumnMappings();
  Assert.assertEquals(5, mappings.size());
  // Expected encodings, index-aligned with rawMappings above.
  ColumnEncoding[] expectedEncodings = {ColumnEncoding.BINARY, ColumnEncoding.BINARY,
      ColumnEncoding.STRING, ColumnEncoding.STRING, ColumnEncoding.BINARY};
  for (int i = 0; i < expectedEncodings.length; i++) {
    Assert.assertEquals(expectedEncodings[i], mappings.get(i).getEncoding());
    Assert.assertEquals(columnTypes.get(i).toString(), mappings.get(i).getColumnType());
  }
}
Class: org.apache.hadoop.hive.accumulo.columns.TestColumnMappingFactory APIUtilityVerifier EqualityVerifier
@Test public void testCaseInsensitiveRowId() {
  // Both a bare ":rowid" and one carrying an encoding suffix must produce a
  // rowid mapping.
  for (String spec : new String[] {":rowid", ":rowid#b"}) {
    ColumnMapping mapping =
        ColumnMappingFactory.get(spec, ColumnEncoding.getDefault(), "col", TypeInfoFactory.stringTypeInfo);
    Assert.assertEquals(HiveAccumuloRowIdColumnMapping.class, mapping.getClass());
  }
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void testInlineEncodingOverridesDefault() {
  // The inline "#s" suffix must win over the BINARY default encoding.
  ColumnMapping mapping =
      ColumnMappingFactory.get("cf:foo#s", ColumnEncoding.BINARY, "col", TypeInfoFactory.stringTypeInfo);
  Assert.assertEquals(HiveAccumuloColumnMapping.class, mapping.getClass());
  HiveAccumuloColumnMapping columnMapping = (HiveAccumuloColumnMapping) mapping;
  Assert.assertEquals("cf", columnMapping.getColumnFamily());
  Assert.assertEquals("foo", columnMapping.getColumnQualifier());
  Assert.assertEquals(ColumnEncoding.STRING, columnMapping.getEncoding());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void testGetMapWithPrefix() {
  // "cf:foo*" must produce a map mapping over qualifiers prefixed "foo".
  ColumnMapping mapping = ColumnMappingFactory.get("cf:foo*", ColumnEncoding.getDefault(), "col",
      TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo));
  Assert.assertEquals(HiveAccumuloMapColumnMapping.class, mapping.getClass());
  HiveAccumuloMapColumnMapping mapColumnMapping = (HiveAccumuloMapColumnMapping) mapping;
  Assert.assertEquals("cf", mapColumnMapping.getColumnFamily());
  Assert.assertEquals("foo", mapColumnMapping.getColumnQualifierPrefix());
  // Key and value both fall back to the default encoding.
  Assert.assertEquals(ColumnEncoding.getDefault(), mapColumnMapping.getKeyEncoding());
  Assert.assertEquals(ColumnEncoding.getDefault(), mapColumnMapping.getValueEncoding());
}
EqualityVerifier
@Test public void testEscapedColumnFamily() {
  // A backslash-escaped colon inside the family is unescaped by the parser,
  // while the remainder becomes the qualifier.
  // Generic Entry type restored (it was stripped to a raw "Entry").
  String cf = "c" + '\\' + ":f";
  String cq = "cq1:cq2";
  Entry<String, String> pair = ColumnMappingFactory.parseMapping(cf + ":" + cq);
  Assert.assertEquals("c:f", pair.getKey());
  Assert.assertEquals(cq, pair.getValue());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * An escaped '*' ("\*") inside the qualifier is literal text; only the final
 * unescaped '*' marks the mapping as a prefix/map mapping.
 */
@Test public void testPrefixWithEscape(){
  final String spec="cf:foo\\*bar*";
  final ColumnMapping parsed=ColumnMappingFactory.get(spec,ColumnEncoding.getDefault(),"col",TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.stringTypeInfo,TypeInfoFactory.stringTypeInfo));
  Assert.assertEquals(HiveAccumuloMapColumnMapping.class,parsed.getClass());
  final HiveAccumuloMapColumnMapping asMap=(HiveAccumuloMapColumnMapping)parsed;
  Assert.assertEquals("cf",asMap.getColumnFamily());
  // The escaped asterisk survives as a literal '*' in the prefix.
  Assert.assertEquals("foo*bar",asMap.getColumnQualifierPrefix());
  Assert.assertEquals(ColumnEncoding.getDefault(),asMap.getKeyEncoding());
  Assert.assertEquals(ColumnEncoding.getDefault(),asMap.getValueEncoding());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * "cf:*" (bare asterisk, no prefix) with a Hive map type must yield a map
 * column mapping whose qualifier prefix is empty, i.e. the whole family maps.
 */
@Test public void testGetMap(){
  final String spec="cf:*";
  final ColumnMapping parsed=ColumnMappingFactory.get(spec,ColumnEncoding.getDefault(),"col",TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.stringTypeInfo,TypeInfoFactory.stringTypeInfo));
  Assert.assertEquals(HiveAccumuloMapColumnMapping.class,parsed.getClass());
  final HiveAccumuloMapColumnMapping asMap=(HiveAccumuloMapColumnMapping)parsed;
  Assert.assertEquals("cf",asMap.getColumnFamily());
  // A lone '*' means "every qualifier in the family": the prefix is empty.
  Assert.assertEquals("",asMap.getColumnQualifierPrefix());
  Assert.assertEquals(ColumnEncoding.getDefault(),asMap.getKeyEncoding());
  Assert.assertEquals(ColumnEncoding.getDefault(),asMap.getValueEncoding());
}
EqualityVerifier
/**
 * parseMapping splits only on the first unescaped colon: any further colons
 * belong to the qualifier.
 */
@Test public void testColumnMappingWithMultipleColons(){
  String cf="cf", cq="cq1:cq2";
  // Parameterized Entry (the original used the raw type); parseMapping yields
  // a (family, qualifier) pair of strings.
  Entry<String,String> pair=ColumnMappingFactory.parseMapping(cf + ":" + cq);
  Assert.assertEquals(cf,pair.getKey());
  Assert.assertEquals(cq,pair.getValue());
}
InternalCallVerifier EqualityVerifier
/**
 * The bare rowid token must produce a HiveAccumuloRowIdColumnMapping that
 * carries through the Hive column name and type it was built with.
 */
@Test public void testRowIdCreatesRowIdMapping(){
  final ColumnMapping parsed=ColumnMappingFactory.get(AccumuloHiveConstants.ROWID,ColumnEncoding.STRING,"row",TypeInfoFactory.stringTypeInfo);
  Assert.assertEquals(HiveAccumuloRowIdColumnMapping.class,parsed.getClass());
  Assert.assertEquals("row",parsed.getColumnName());
  Assert.assertEquals(TypeInfoFactory.stringTypeInfo.toString(),parsed.getColumnType());
}
InternalCallVerifier EqualityVerifier
/**
 * A plain "family:qualifier" spec must produce a HiveAccumuloColumnMapping
 * that carries through the Hive column name and type it was built with.
 */
@Test public void testColumnMappingCreatesAccumuloColumnMapping(){
  final ColumnMapping parsed=ColumnMappingFactory.get("cf:cq",ColumnEncoding.STRING,"col",TypeInfoFactory.stringTypeInfo);
  Assert.assertEquals(HiveAccumuloColumnMapping.class,parsed.getClass());
  Assert.assertEquals("col",parsed.getColumnName());
  Assert.assertEquals(TypeInfoFactory.stringTypeInfo.toString(),parsed.getColumnType());
}
EqualityVerifier
/**
 * Escaped colons on both sides of the separator are literal characters:
 * parseMapping must unescape "\:" in the family and in the qualifier.
 */
@Test public void testEscapedColumnFamilyAndQualifier(){
  String cf="c" + '\\' + ":f", cq="cq1\\:cq2";
  // Parameterized Entry (the original used the raw type); parseMapping yields
  // a (family, qualifier) pair of strings.
  Entry<String,String> pair=ColumnMappingFactory.parseMapping(cf + ":" + cq);
  Assert.assertEquals("c:f",pair.getKey());
  Assert.assertEquals("cq1:cq2",pair.getValue());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * An escaped asterisk ("\*") is a literal qualifier character, so the result
 * is an ordinary column mapping (not a map/prefix mapping).
 */
@Test public void testEscapedAsterisk(){
  final String spec="cf:\\*";
  final ColumnMapping parsed=ColumnMappingFactory.get(spec,ColumnEncoding.getDefault(),"col",TypeInfoFactory.stringTypeInfo);
  Assert.assertEquals(HiveAccumuloColumnMapping.class,parsed.getClass());
  final HiveAccumuloColumnMapping asColumn=(HiveAccumuloColumnMapping)parsed;
  Assert.assertEquals("cf",asColumn.getColumnFamily());
  // The backslash is consumed; the qualifier is the literal "*".
  Assert.assertEquals("*",asColumn.getColumnQualifier());
  Assert.assertEquals(ColumnEncoding.getDefault(),asColumn.getEncoding());
}
Class: org.apache.hadoop.hive.accumulo.columns.TestHiveAccumuloColumnMapping InternalCallVerifier EqualityVerifier
/**
 * When the mapping is constructed directly, colons inside the qualifier are
 * preserved verbatim by the accessors.
 */
@Test public void testColumnMappingWithMultipleColons(){
  final String family="cf";
  final String qualifier="cq1:cq2";
  final HiveAccumuloColumnMapping mapping=new HiveAccumuloColumnMapping(family,qualifier,ColumnEncoding.STRING,"col",TypeInfoFactory.stringTypeInfo.toString());
  Assert.assertEquals(family,mapping.getColumnFamily());
  Assert.assertEquals(qualifier,mapping.getColumnQualifier());
}
Class: org.apache.hadoop.hive.accumulo.mr.TestHiveAccumuloTableInputFormat BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Writes a row (r4) protected by the column visibility "foo", grants the
 * "foo" authorization to USER, sets AUTHORIZATIONS_KEY on the job conf, and
 * verifies the reader now returns r4 in addition to the unprotected r1-r3.
 */
@Test public void testGetProtectedField() throws Exception {
  FileInputFormat.addInputPath(conf,new Path("unused"));
  BatchWriterConfig writerConf=new BatchWriterConfig();
  BatchWriter writer=con.createBatchWriter(TEST_TABLE,writerConf);
  // Extend the user's authorizations so the visibility-protected row is readable.
  Authorizations origAuths=con.securityOperations().getUserAuthorizations(USER);
  con.securityOperations().changeUserAuthorizations(USER,new Authorizations(origAuths.toString() + ",foo"));
  Mutation m=new Mutation("r4");
  m.put(COLUMN_FAMILY,NAME,new ColumnVisibility("foo"),new Value("frank".getBytes()));
  m.put(COLUMN_FAMILY,SID,new ColumnVisibility("foo"),new Value(parseIntBytes("4")));
  m.put(COLUMN_FAMILY,DEGREES,new ColumnVisibility("foo"),new Value(parseDoubleBytes("60.6")));
  m.put(COLUMN_FAMILY,MILLIS,new ColumnVisibility("foo"),new Value(parseLongBytes("777")));
  writer.addMutation(m);
  writer.close();
  conf.set(AccumuloSerDeParameters.AUTHORIZATIONS_KEY,"foo");
  InputSplit[] splits=inputformat.getSplits(conf,0);
  // Expected value first per JUnit convention (was reversed in the original).
  assertEquals(1,splits.length);
  // Parameterized RecordReader (was raw).
  RecordReader<Text,AccumuloHiveRow> reader=inputformat.getRecordReader(splits[0],conf,null);
  Text rowId=new Text("r1");
  AccumuloHiveRow row=new AccumuloHiveRow();
  assertTrue(reader.next(rowId,row));
  assertEquals(rowId.toString(),row.getRowId());
  assertTrue(row.hasFamAndQual(COLUMN_FAMILY,NAME));
  assertArrayEquals("brian".getBytes(),row.getValue(COLUMN_FAMILY,NAME));
  rowId=new Text("r2");
  assertTrue(reader.next(rowId,row));
  assertEquals(rowId.toString(),row.getRowId());
  assertTrue(row.hasFamAndQual(COLUMN_FAMILY,NAME));
  assertArrayEquals("mark".getBytes(),row.getValue(COLUMN_FAMILY,NAME));
  rowId=new Text("r3");
  assertTrue(reader.next(rowId,row));
  assertEquals(rowId.toString(),row.getRowId());
  assertTrue(row.hasFamAndQual(COLUMN_FAMILY,NAME));
  assertArrayEquals("dennis".getBytes(),row.getValue(COLUMN_FAMILY,NAME));
  // The protected row is visible because of the granted "foo" authorization.
  rowId=new Text("r4");
  assertTrue(reader.next(rowId,row));
  assertEquals(rowId.toString(),row.getRowId());
  assertTrue(row.hasFamAndQual(COLUMN_FAMILY,NAME));
  assertArrayEquals("frank".getBytes(),row.getValue(COLUMN_FAMILY,NAME));
  assertFalse(reader.next(rowId,row));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * A map-typed column ("cf:*") should collapse to a single (family, null)
 * pair so the scan fetches the entire column family.
 */
@Test public void testMapColumnPairs() throws TooManyAccumuloColumnsException {
  ColumnMapper columnMapper=new ColumnMapper(":rowID,cf:*",conf.get(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE),Arrays.asList("row","col"),Arrays.asList(TypeInfoFactory.stringTypeInfo,TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.stringTypeInfo,TypeInfoFactory.stringTypeInfo)));
  // Generic arguments restored: the original "Set>" did not compile.
  Set<Pair<Text,Text>> pairs=inputformat.getPairCollection(columnMapper.getColumnMappings());
  Assert.assertEquals(1,pairs.size());
  Pair<Text,Text> cfCq=pairs.iterator().next();
  Assert.assertEquals("cf",cfCq.getFirst().toString());
  // A null qualifier means "whole family" to the scanner configuration.
  Assert.assertNull(cfCq.getSecond());
}
BranchVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Attaches a PrimitiveComparisonFilter (StringCompare/Equal on cf:name ==
 * "brian") to a scan and verifies only row r1 survives, with all four
 * expected columns and values.
 */
@Test public void testNameEqualBrian() throws Exception {
  Connector con=mockInstance.getConnector(USER,new PasswordToken(PASS.getBytes()));
  Scanner scan=con.createScanner(TEST_TABLE,new Authorizations("blah"));
  IteratorSetting is=new IteratorSetting(1,PrimitiveComparisonFilter.FILTER_PREFIX + 1,PrimitiveComparisonFilter.class);
  is.addOption(PrimitiveComparisonFilter.P_COMPARE_CLASS,StringCompare.class.getName());
  is.addOption(PrimitiveComparisonFilter.COMPARE_OPT_CLASS,Equal.class.getName());
  is.addOption(PrimitiveComparisonFilter.CONST_VAL,new String(Base64.encodeBase64("brian".getBytes())));
  is.addOption(PrimitiveComparisonFilter.COLUMN,"cf:name");
  scan.addScanIterator(is);
  boolean foundName=false;
  boolean foundSid=false;
  boolean foundDegrees=false;
  boolean foundMillis=false;
  // Generic arguments restored on Map.Entry/SortedMap (were raw).
  for (Map.Entry<Key,Value> kv : scan) {
    SortedMap<Key,Value> items=PrimitiveComparisonFilter.decodeRow(kv.getKey(),kv.getValue());
    for (Map.Entry<Key,Value> item : items.entrySet()) {
      // Expected value first per JUnit convention (was reversed).
      assertEquals("r1",item.getKey().getRow().toString());
      if (item.getKey().getColumnQualifier().equals(NAME)) {
        foundName=true;
        assertArrayEquals("brian".getBytes(),item.getValue().get());
      } else if (item.getKey().getColumnQualifier().equals(SID)) {
        foundSid=true;
        assertArrayEquals(parseIntBytes("1"),item.getValue().get());
      } else if (item.getKey().getColumnQualifier().equals(DEGREES)) {
        foundDegrees=true;
        assertArrayEquals(parseDoubleBytes("44.5"),item.getValue().get());
      } else if (item.getKey().getColumnQualifier().equals(MILLIS)) {
        foundMillis=true;
        assertArrayEquals(parseLongBytes("555"),item.getValue().get());
      }
    }
  }
  // Short-circuit && replaces the original bitwise & on booleans.
  assertTrue(foundDegrees && foundMillis && foundName && foundSid);
}
BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Filters the scan with IntCompare/GreaterThan on cf:sid > 1 and checks that
 * exactly the two matching rows (r2 "mark", r3 "dennis") come back, each
 * carrying all four columns.
 */
@Test public void testGreaterThan1Sid() throws Exception {
  Connector con=mockInstance.getConnector(USER,new PasswordToken(PASS.getBytes()));
  Scanner scan=con.createScanner(TEST_TABLE,new Authorizations("blah"));
  IteratorSetting is=new IteratorSetting(1,PrimitiveComparisonFilter.FILTER_PREFIX + 1,PrimitiveComparisonFilter.class);
  is.addOption(PrimitiveComparisonFilter.P_COMPARE_CLASS,IntCompare.class.getName());
  is.addOption(PrimitiveComparisonFilter.COMPARE_OPT_CLASS,GreaterThan.class.getName());
  is.addOption(PrimitiveComparisonFilter.CONST_VAL,new String(Base64.encodeBase64(parseIntBytes("1"))));
  is.addOption(PrimitiveComparisonFilter.COLUMN,"cf:sid");
  scan.addScanIterator(is);
  boolean foundMark=false;
  boolean foundDennis=false;
  int totalCount=0;
  // Generic arguments restored on Map.Entry/SortedMap (were raw).
  for (Map.Entry<Key,Value> kv : scan) {
    boolean foundName=false;
    boolean foundSid=false;
    boolean foundDegrees=false;
    boolean foundMillis=false;
    SortedMap<Key,Value> items=PrimitiveComparisonFilter.decodeRow(kv.getKey(),kv.getValue());
    for (Map.Entry<Key,Value> item : items.entrySet()) {
      if (item.getKey().getRow().toString().equals("r2")) {
        foundMark=true;
      } else if (item.getKey().getRow().toString().equals("r3")) {
        foundDennis=true;
      }
      if (item.getKey().getColumnQualifier().equals(NAME)) {
        foundName=true;
      } else if (item.getKey().getColumnQualifier().equals(SID)) {
        foundSid=true;
      } else if (item.getKey().getColumnQualifier().equals(DEGREES)) {
        foundDegrees=true;
      } else if (item.getKey().getColumnQualifier().equals(MILLIS)) {
        foundMillis=true;
      }
    }
    totalCount++;
    // Short-circuit && replaces the original bitwise & on booleans.
    assertTrue(foundDegrees && foundMillis && foundName && foundSid);
  }
  assertTrue(foundDennis && foundMark);
  // Expected value first per JUnit convention (was reversed).
  assertEquals(2,totalCount);
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Reads row r1 into a reused AccumuloHiveRow and checks every column value.
 * NOTE(review): the row is pre-populated with the same values before next()
 * is called — presumably to verify the reader resets and refills the reused
 * value object; confirm against the reader implementation.
 */
@Test public void testHiveAccumuloRecord() throws Exception {
  FileInputFormat.addInputPath(conf,new Path("unused"));
  InputSplit[] splits=inputformat.getSplits(conf,0);
  // Expected value first per JUnit convention (was reversed in the original).
  assertEquals(1,splits.length);
  // Parameterized RecordReader (was raw).
  RecordReader<Text,AccumuloHiveRow> reader=inputformat.getRecordReader(splits[0],conf,null);
  Text rowId=new Text("r1");
  AccumuloHiveRow row=new AccumuloHiveRow();
  row.add(COLUMN_FAMILY.toString(),NAME.toString(),"brian".getBytes());
  row.add(COLUMN_FAMILY.toString(),SID.toString(),parseIntBytes("1"));
  row.add(COLUMN_FAMILY.toString(),DEGREES.toString(),parseDoubleBytes("44.5"));
  row.add(COLUMN_FAMILY.toString(),MILLIS.toString(),parseLongBytes("555"));
  assertTrue(reader.next(rowId,row));
  assertEquals(rowId.toString(),row.getRowId());
  assertTrue(row.hasFamAndQual(COLUMN_FAMILY,NAME));
  assertArrayEquals("brian".getBytes(),row.getValue(COLUMN_FAMILY,NAME));
  assertTrue(row.hasFamAndQual(COLUMN_FAMILY,SID));
  assertArrayEquals(parseIntBytes("1"),row.getValue(COLUMN_FAMILY,SID));
  assertTrue(row.hasFamAndQual(COLUMN_FAMILY,DEGREES));
  assertArrayEquals(parseDoubleBytes("44.5"),row.getValue(COLUMN_FAMILY,DEGREES));
  assertTrue(row.hasFamAndQual(COLUMN_FAMILY,MILLIS));
  assertArrayEquals(parseLongBytes("555"),row.getValue(COLUMN_FAMILY,MILLIS));
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * With a column mapping ("cf:f1") that matches no stored qualifier, the
 * record reader must report no records at all.
 */
@Test public void testGetNone() throws Exception {
  FileInputFormat.addInputPath(conf,new Path("unused"));
  conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS,"cf:f1");
  InputSplit[] splits=inputformat.getSplits(conf,0);
  // Expected value first per JUnit convention (was reversed in the original).
  assertEquals(1,splits.length);
  // Parameterized RecordReader (was raw).
  RecordReader<Text,AccumuloHiveRow> reader=inputformat.getRecordReader(splits[0],conf,null);
  Text rowId=new Text("r1");
  AccumuloHiveRow row=new AccumuloHiveRow();
  row.setRowId("r1");
  assertFalse(reader.next(rowId,row));
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Iterates the reader over rows r1-r3, checking the row id and the cf:name
 * value for each; after r3 the reader must be exhausted.
 */
@Test public void testGetOnlyName() throws Exception {
  FileInputFormat.addInputPath(conf,new Path("unused"));
  InputSplit[] splits=inputformat.getSplits(conf,0);
  // Expected value first per JUnit convention (was reversed in the original).
  assertEquals(1,splits.length);
  // Parameterized RecordReader (was raw).
  RecordReader<Text,AccumuloHiveRow> reader=inputformat.getRecordReader(splits[0],conf,null);
  Text rowId=new Text("r1");
  AccumuloHiveRow row=new AccumuloHiveRow();
  assertTrue(reader.next(rowId,row));
  assertEquals(rowId.toString(),row.getRowId());
  assertTrue(row.hasFamAndQual(COLUMN_FAMILY,NAME));
  assertArrayEquals("brian".getBytes(),row.getValue(COLUMN_FAMILY,NAME));
  rowId=new Text("r2");
  assertTrue(reader.next(rowId,row));
  assertEquals(rowId.toString(),row.getRowId());
  assertTrue(row.hasFamAndQual(COLUMN_FAMILY,NAME));
  assertArrayEquals("mark".getBytes(),row.getValue(COLUMN_FAMILY,NAME));
  rowId=new Text("r3");
  assertTrue(reader.next(rowId,row));
  assertEquals(rowId.toString(),row.getRowId());
  assertTrue(row.hasFamAndQual(COLUMN_FAMILY,NAME));
  assertArrayEquals("dennis".getBytes(),row.getValue(COLUMN_FAMILY,NAME));
  assertFalse(reader.next(rowId,row));
}
InternalCallVerifier EqualityVerifier
/**
 * When predicate iterators were not serialized into the split, opening a
 * record reader must compensate by attaching them to the split before the
 * scan starts.
 */
@Test public void testIteratorNotInSplitsCompensation() throws Exception {
  FileInputFormat.addInputPath(conf,new Path("unused"));
  InputSplit[] splits=inputformat.getSplits(conf,0);
  assertEquals(1,splits.length);
  InputSplit split=splits[0];
  IteratorSetting is=new IteratorSetting(1,PrimitiveComparisonFilter.FILTER_PREFIX + 1,PrimitiveComparisonFilter.class);
  is.addOption(PrimitiveComparisonFilter.P_COMPARE_CLASS,StringCompare.class.getName());
  is.addOption(PrimitiveComparisonFilter.COMPARE_OPT_CLASS,Equal.class.getName());
  is.addOption(PrimitiveComparisonFilter.CONST_VAL,new String(Base64.encodeBase64(new byte[]{'0'})));
  is.addOption(PrimitiveComparisonFilter.COLUMN,"cf:cq");
  // Stub the predicate handler so the input format believes this iterator applies.
  AccumuloPredicateHandler predicateHandler=Mockito.mock(AccumuloPredicateHandler.class);
  Mockito.when(predicateHandler.getIterators(Mockito.any(JobConf.class),Mockito.any(ColumnMapper.class))).thenReturn(Arrays.asList(is));
  inputformat.predicateHandler=predicateHandler;
  inputformat.getRecordReader(split,conf,null);
  // Parameterized List (was raw): the split must now carry exactly our iterator.
  List<IteratorSetting> settingsOnSplit=((HiveAccumuloSplit)split).getSplit().getIterators();
  assertEquals(1,settingsOnSplit.size());
  assertEquals(is,settingsOnSplit.get(0));
}
BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Stacks two PrimitiveComparisonFilters (cf:dgrs >= 55.6 and cf:mills < 778)
 * on one scan and expects only row r3 ("dennis") to satisfy both.
 */
@Test public void testDegreesAndMillis() throws Exception {
  Connector con=mockInstance.getConnector(USER,new PasswordToken(PASS.getBytes()));
  Scanner scan=con.createScanner(TEST_TABLE,new Authorizations("blah"));
  IteratorSetting is=new IteratorSetting(1,PrimitiveComparisonFilter.FILTER_PREFIX + 1,PrimitiveComparisonFilter.class);
  is.addOption(PrimitiveComparisonFilter.P_COMPARE_CLASS,DoubleCompare.class.getName());
  is.addOption(PrimitiveComparisonFilter.COMPARE_OPT_CLASS,GreaterThanOrEqual.class.getName());
  is.addOption(PrimitiveComparisonFilter.CONST_VAL,new String(Base64.encodeBase64(parseDoubleBytes("55.6"))));
  is.addOption(PrimitiveComparisonFilter.COLUMN,"cf:dgrs");
  scan.addScanIterator(is);
  IteratorSetting is2=new IteratorSetting(2,PrimitiveComparisonFilter.FILTER_PREFIX + 2,PrimitiveComparisonFilter.class);
  is2.addOption(PrimitiveComparisonFilter.P_COMPARE_CLASS,LongCompare.class.getName());
  is2.addOption(PrimitiveComparisonFilter.COMPARE_OPT_CLASS,LessThan.class.getName());
  is2.addOption(PrimitiveComparisonFilter.CONST_VAL,new String(Base64.encodeBase64(parseLongBytes("778"))));
  is2.addOption(PrimitiveComparisonFilter.COLUMN,"cf:mills");
  scan.addScanIterator(is2);
  boolean foundDennis=false;
  int totalCount=0;
  // Generic arguments restored on Map.Entry/SortedMap (were raw).
  for (Map.Entry<Key,Value> kv : scan) {
    boolean foundName=false;
    boolean foundSid=false;
    boolean foundDegrees=false;
    boolean foundMillis=false;
    SortedMap<Key,Value> items=PrimitiveComparisonFilter.decodeRow(kv.getKey(),kv.getValue());
    for (Map.Entry<Key,Value> item : items.entrySet()) {
      // NOTE(review): decodeRow is applied a second time to each already-decoded
      // entry here (unlike the sibling tests) — confirm this nesting is intentional.
      SortedMap<Key,Value> nestedItems=PrimitiveComparisonFilter.decodeRow(item.getKey(),item.getValue());
      for (Map.Entry<Key,Value> nested : nestedItems.entrySet()) {
        if (nested.getKey().getRow().toString().equals("r3")) {
          foundDennis=true;
        }
        if (nested.getKey().getColumnQualifier().equals(NAME)) {
          foundName=true;
        } else if (nested.getKey().getColumnQualifier().equals(SID)) {
          foundSid=true;
        } else if (nested.getKey().getColumnQualifier().equals(DEGREES)) {
          foundDegrees=true;
        } else if (nested.getKey().getColumnQualifier().equals(MILLIS)) {
          foundMillis=true;
        }
      }
    }
    totalCount++;
    // Short-circuit && replaces the original bitwise & on booleans.
    assertTrue(foundDegrees && foundMillis && foundName && foundSid);
  }
  assertTrue(foundDennis);
  // Expected value first per JUnit convention (was reversed).
  assertEquals(1,totalCount);
}
EqualityVerifier
@Test public void testColumnMappingsToPairs(){
List mappings=new ArrayList();
Set> columns=new HashSet>();
mappings.add(new HiveAccumuloRowIdColumnMapping(AccumuloHiveConstants.ROWID,ColumnEncoding.STRING,"row",TypeInfoFactory.stringTypeInfo.toString()));
mappings.add(new HiveAccumuloColumnMapping("person","name",ColumnEncoding.STRING,"col1",TypeInfoFactory.stringTypeInfo.toString()));
mappings.add(new HiveAccumuloColumnMapping("person","age",ColumnEncoding.STRING,"col2",TypeInfoFactory.stringTypeInfo.toString()));
mappings.add(new HiveAccumuloColumnMapping("person","height",ColumnEncoding.STRING,"col3",TypeInfoFactory.stringTypeInfo.toString()));
mappings.add(new HiveAccumuloColumnMapping("city","name",ColumnEncoding.STRING,"col4",TypeInfoFactory.stringTypeInfo.toString()));
columns.add(new Pair(new Text("person"),new Text("name")));
columns.add(new Pair(new Text("person"),new Text("age")));
columns.add(new Pair(new Text("person"),new Text("height")));
columns.add(new Pair(new Text("city"),new Text("name")));
assertEquals(columns,inputformat.getPairCollection(mappings));
}
Class: org.apache.hadoop.hive.accumulo.mr.TestHiveAccumuloTableOutputFormat APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * With DEFAULT_STORAGE_TYPE set to BINARY, string-typed columns are still
 * written as UTF-8 text: serializing "row value1 value2" through the
 * OutputFormat against a MockInstance must produce two readable cf:cq1/cf:cq2
 * entries under row "row".
 */
@Test public void testBinarySerializationOnStringFallsBackToUtf8() throws Exception {
  Instance inst=new MockInstance(test.getMethodName());
  Connector conn=inst.getConnector("root",new PasswordToken(""));
  HiveAccumuloTableOutputFormat outputFormat=new HiveAccumuloTableOutputFormat();
  String table=test.getMethodName();
  conn.tableOperations().create(table);
  JobConf conf=new JobConf();
  conf.set(AccumuloConnectionParameters.INSTANCE_NAME,inst.getInstanceName());
  conf.set(AccumuloConnectionParameters.USER_NAME,"root");
  conf.set(AccumuloConnectionParameters.USER_PASS,"");
  conf.setBoolean(AccumuloConnectionParameters.USE_MOCK_INSTANCE,true);
  conf.set(AccumuloConnectionParameters.TABLE_NAME,test.getMethodName());
  FileSystem local=FileSystem.getLocal(conf);
  outputFormat.checkOutputSpecs(local,conf);
  // Parameterized (was raw): this output format writes (Text, Mutation) pairs.
  RecordWriter<Text,Mutation> recordWriter=outputFormat.getRecordWriter(local,conf,null,null);
  // Parameterized lists (were raw); the explicit <TypeInfo> witness keeps the
  // heterogeneous-capable list typed as List<TypeInfo>.
  List<String> names=Arrays.asList("row","col1","col2");
  List<TypeInfo> types=Arrays.<TypeInfo>asList(TypeInfoFactory.stringTypeInfo,TypeInfoFactory.stringTypeInfo,TypeInfoFactory.stringTypeInfo);
  Properties tableProperties=new Properties();
  tableProperties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,":rowID,cf:cq1,cf:cq2");
  tableProperties.setProperty(serdeConstants.FIELD_DELIM," ");
  tableProperties.setProperty(serdeConstants.LIST_COLUMNS,Joiner.on(',').join(names));
  tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES,Joiner.on(',').join(types));
  // The property under test: a BINARY default that string columns must ignore.
  tableProperties.setProperty(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE,ColumnEncoding.BINARY.getName());
  AccumuloSerDeParameters accumuloSerDeParams=new AccumuloSerDeParameters(new Configuration(),tableProperties,AccumuloSerDe.class.getSimpleName());
  LazySerDeParameters serDeParams=accumuloSerDeParams.getSerDeParameters();
  AccumuloRowSerializer serializer=new AccumuloRowSerializer(0,serDeParams,accumuloSerDeParams.getColumnMappings(),AccumuloSerDeParameters.DEFAULT_VISIBILITY_LABEL,accumuloSerDeParams.getRowIdFactory());
  TypeInfo stringTypeInfo=TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME);
  LazySimpleStructObjectInspector structOI=(LazySimpleStructObjectInspector)LazyFactory.createLazyStructInspector(Arrays.asList("row","cq1","cq2"),Arrays.asList(stringTypeInfo,stringTypeInfo,stringTypeInfo),serDeParams.getSeparators(),serDeParams.getNullSequence(),serDeParams.isLastColumnTakesRest(),serDeParams.isEscaped(),serDeParams.getEscapeChar());
  LazyStruct struct=(LazyStruct)LazyFactory.createLazyObject(structOI);
  ByteArrayRef bytes=new ByteArrayRef();
  bytes.setData("row value1 value2".getBytes());
  struct.init(bytes,0,bytes.getData().length);
  Mutation m=serializer.serialize(struct,structOI);
  recordWriter.write(new Text(table),m);
  recordWriter.close(null);
  // Generic arguments restored: the original "Iterator>" did not compile.
  Iterator<Entry<Key,Value>> iter=conn.createScanner(table,new Authorizations()).iterator();
  Assert.assertTrue("Iterator did not have an element as expected",iter.hasNext());
  Entry<Key,Value> entry=iter.next();
  Key k=entry.getKey();
  Value v=entry.getValue();
  Assert.assertEquals("row",k.getRow().toString());
  Assert.assertEquals("cf",k.getColumnFamily().toString());
  Assert.assertEquals("cq1",k.getColumnQualifier().toString());
  Assert.assertEquals("",k.getColumnVisibility().toString());
  Assert.assertEquals("value1",new String(v.get()));
  Assert.assertTrue("Iterator did not have an element as expected",iter.hasNext());
  entry=iter.next();
  k=entry.getKey();
  v=entry.getValue();
  Assert.assertEquals("row",k.getRow().toString());
  Assert.assertEquals("cf",k.getColumnFamily().toString());
  Assert.assertEquals("cq2",k.getColumnQualifier().toString());
  Assert.assertEquals("",k.getColumnVisibility().toString());
  Assert.assertEquals("value2",new String(v.get()));
  Assert.assertFalse("Iterator unexpectedly had more data",iter.hasNext());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * With Kerberos authentication and SASL enabled, configureAccumuloOutputFormat
 * must obtain an Accumulo delegation token, wrap it as a Hadoop token, merge
 * it into the JobConf, and add it to the current user's credentials.
 */
@SuppressWarnings({"rawtypes","unchecked"}) @Test public void testSaslConfiguration() throws IOException, AccumuloException, AccumuloSecurityException {
  final HiveAccumuloTableOutputFormat outputFormat=Mockito.mock(HiveAccumuloTableOutputFormat.class);
  final AuthenticationToken authToken=Mockito.mock(AuthenticationToken.class);
  final Token hadoopToken=Mockito.mock(Token.class);
  final HiveAccumuloHelper helper=Mockito.mock(HiveAccumuloHelper.class);
  final AccumuloConnectionParameters cnxnParams=Mockito.mock(AccumuloConnectionParameters.class);
  final Connector connector=Mockito.mock(Connector.class);
  // Force a Kerberos-authenticated test user with no configured password.
  conf.set("hadoop.security.authentication","kerberos");
  UserGroupInformation.setConfiguration(conf);
  UserGroupInformation user1=UserGroupInformation.createUserForTesting(user,new String[0]);
  Mockito.when(outputFormat.getCurrentUser()).thenReturn(user1);
  conf.unset(AccumuloConnectionParameters.USER_PASS);
  // Only the method under test runs for real; every collaborator is stubbed.
  Mockito.doCallRealMethod().when(outputFormat).configureAccumuloOutputFormat(conf);
  Mockito.when(outputFormat.getHelper()).thenReturn(helper);
  Mockito.when(outputFormat.getConnectionParams(conf)).thenReturn(cnxnParams);
  Mockito.when(cnxnParams.getConnector()).thenReturn(connector);
  Mockito.when(helper.getDelegationToken(connector)).thenReturn(authToken);
  Mockito.when(helper.getHadoopToken(authToken)).thenReturn(hadoopToken);
  Mockito.when(cnxnParams.useSasl()).thenReturn(true);
  Mockito.when(cnxnParams.getAccumuloUserName()).thenReturn(user);
  Mockito.when(cnxnParams.getAccumuloInstanceName()).thenReturn(instanceName);
  Mockito.when(cnxnParams.getZooKeepers()).thenReturn(zookeepers);
  Mockito.when(outputFormat.hasKerberosCredentials(user1)).thenReturn(true);
  outputFormat.configureAccumuloOutputFormat(conf);
  Mockito.verify(outputFormat).setZooKeeperInstanceWithErrorChecking(conf,instanceName,zookeepers,true);
  Mockito.verify(outputFormat).setConnectorInfoWithErrorChecking(conf,user,authToken);
  Mockito.verify(outputFormat).setDefaultAccumuloTableName(conf,outputTable);
  Mockito.verify(helper).mergeTokenIntoJobConf(conf,hadoopToken);
  // Wildcard replaces the original broken "Collection>"; only size and the
  // single element are inspected, so the element type need not be named.
  Collection<?> tokens=user1.getTokens();
  Assert.assertEquals(1,tokens.size());
  Assert.assertEquals(hadoopToken,tokens.iterator().next());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * End-to-end write through the OutputFormat against a MockInstance: the lazy
 * struct "row value1 value2" serialized with the ":rowID,cf:cq1,cf:cq2"
 * mapping must land as two entries under row "row" with empty visibility.
 */
@Test public void testWriteToMockInstance() throws Exception {
  Instance inst=new MockInstance(test.getMethodName());
  Connector conn=inst.getConnector("root",new PasswordToken(""));
  HiveAccumuloTableOutputFormat outputFormat=new HiveAccumuloTableOutputFormat();
  String table=test.getMethodName();
  conn.tableOperations().create(table);
  JobConf conf=new JobConf();
  conf.set(AccumuloConnectionParameters.INSTANCE_NAME,inst.getInstanceName());
  conf.set(AccumuloConnectionParameters.USER_NAME,"root");
  conf.set(AccumuloConnectionParameters.USER_PASS,"");
  conf.setBoolean(AccumuloConnectionParameters.USE_MOCK_INSTANCE,true);
  conf.set(AccumuloConnectionParameters.TABLE_NAME,test.getMethodName());
  FileSystem local=FileSystem.getLocal(conf);
  outputFormat.checkOutputSpecs(local,conf);
  // Parameterized (was raw): this output format writes (Text, Mutation) pairs.
  RecordWriter<Text,Mutation> recordWriter=outputFormat.getRecordWriter(local,conf,null,null);
  // Parameterized lists (were raw).
  List<String> names=Arrays.asList("row","col1","col2");
  List<TypeInfo> types=Arrays.<TypeInfo>asList(TypeInfoFactory.stringTypeInfo,TypeInfoFactory.stringTypeInfo,TypeInfoFactory.stringTypeInfo);
  Properties tableProperties=new Properties();
  tableProperties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,":rowID,cf:cq1,cf:cq2");
  tableProperties.setProperty(serdeConstants.FIELD_DELIM," ");
  tableProperties.setProperty(serdeConstants.LIST_COLUMNS,Joiner.on(',').join(names));
  tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES,Joiner.on(',').join(types));
  AccumuloSerDeParameters accumuloSerDeParams=new AccumuloSerDeParameters(new Configuration(),tableProperties,AccumuloSerDe.class.getSimpleName());
  LazySerDeParameters serDeParams=accumuloSerDeParams.getSerDeParameters();
  AccumuloRowSerializer serializer=new AccumuloRowSerializer(0,serDeParams,accumuloSerDeParams.getColumnMappings(),AccumuloSerDeParameters.DEFAULT_VISIBILITY_LABEL,accumuloSerDeParams.getRowIdFactory());
  TypeInfo stringTypeInfo=TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME);
  LazySimpleStructObjectInspector structOI=(LazySimpleStructObjectInspector)LazyFactory.createLazyStructInspector(Arrays.asList("row","cq1","cq2"),Arrays.asList(stringTypeInfo,stringTypeInfo,stringTypeInfo),serDeParams.getSeparators(),serDeParams.getNullSequence(),serDeParams.isLastColumnTakesRest(),serDeParams.isEscaped(),serDeParams.getEscapeChar());
  LazyStruct struct=(LazyStruct)LazyFactory.createLazyObject(structOI);
  ByteArrayRef bytes=new ByteArrayRef();
  bytes.setData("row value1 value2".getBytes());
  struct.init(bytes,0,bytes.getData().length);
  Mutation m=serializer.serialize(struct,structOI);
  recordWriter.write(new Text(table),m);
  recordWriter.close(null);
  // Generic arguments restored: the original "Iterator>" did not compile.
  Iterator<Entry<Key,Value>> iter=conn.createScanner(table,new Authorizations()).iterator();
  Assert.assertTrue("Iterator did not have an element as expected",iter.hasNext());
  Entry<Key,Value> entry=iter.next();
  Key k=entry.getKey();
  Value v=entry.getValue();
  Assert.assertEquals("row",k.getRow().toString());
  Assert.assertEquals("cf",k.getColumnFamily().toString());
  Assert.assertEquals("cq1",k.getColumnQualifier().toString());
  Assert.assertEquals("",k.getColumnVisibility().toString());
  Assert.assertEquals("value1",new String(v.get()));
  Assert.assertTrue("Iterator did not have an element as expected",iter.hasNext());
  entry=iter.next();
  k=entry.getKey();
  v=entry.getValue();
  Assert.assertEquals("row",k.getRow().toString());
  Assert.assertEquals("cf",k.getColumnFamily().toString());
  Assert.assertEquals("cq2",k.getColumnQualifier().toString());
  Assert.assertEquals("",k.getColumnVisibility().toString());
  Assert.assertEquals("value2",new String(v.get()));
  Assert.assertFalse("Iterator unexpectedly had more data",iter.hasNext());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Same end-to-end write as testWriteToMockInstance, but the serializer is
 * given the column visibility "foo": both written entries must carry that
 * visibility, and a scan authorized for "foo" must read them back.
 */
@Test public void testWriteToMockInstanceWithVisibility() throws Exception {
  Instance inst=new MockInstance(test.getMethodName());
  Connector conn=inst.getConnector("root",new PasswordToken(""));
  Authorizations auths=new Authorizations("foo");
  // Grant the label to root so the verification scan can see the entries.
  conn.securityOperations().changeUserAuthorizations("root",auths);
  HiveAccumuloTableOutputFormat outputFormat=new HiveAccumuloTableOutputFormat();
  String table=test.getMethodName();
  conn.tableOperations().create(table);
  JobConf conf=new JobConf();
  conf.set(AccumuloConnectionParameters.INSTANCE_NAME,inst.getInstanceName());
  conf.set(AccumuloConnectionParameters.USER_NAME,"root");
  conf.set(AccumuloConnectionParameters.USER_PASS,"");
  conf.setBoolean(AccumuloConnectionParameters.USE_MOCK_INSTANCE,true);
  conf.set(AccumuloConnectionParameters.TABLE_NAME,test.getMethodName());
  FileSystem local=FileSystem.getLocal(conf);
  outputFormat.checkOutputSpecs(local,conf);
  // Parameterized (was raw): this output format writes (Text, Mutation) pairs.
  RecordWriter<Text,Mutation> recordWriter=outputFormat.getRecordWriter(local,conf,null,null);
  // Parameterized lists (were raw).
  List<String> names=Arrays.asList("row","col1","col2");
  List<TypeInfo> types=Arrays.<TypeInfo>asList(TypeInfoFactory.stringTypeInfo,TypeInfoFactory.stringTypeInfo,TypeInfoFactory.stringTypeInfo);
  Properties tableProperties=new Properties();
  tableProperties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,":rowID,cf:cq1,cf:cq2");
  tableProperties.setProperty(serdeConstants.FIELD_DELIM," ");
  tableProperties.setProperty(serdeConstants.LIST_COLUMNS,Joiner.on(',').join(names));
  tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES,Joiner.on(',').join(types));
  AccumuloSerDeParameters accumuloSerDeParams=new AccumuloSerDeParameters(new Configuration(),tableProperties,AccumuloSerDe.class.getSimpleName());
  LazySerDeParameters serDeParams=accumuloSerDeParams.getSerDeParameters();
  // The serializer stamps every written cell with the "foo" visibility.
  AccumuloRowSerializer serializer=new AccumuloRowSerializer(0,serDeParams,accumuloSerDeParams.getColumnMappings(),new ColumnVisibility("foo"),accumuloSerDeParams.getRowIdFactory());
  LazySimpleStructObjectInspector structOI=(LazySimpleStructObjectInspector)LazyFactory.createLazyStructInspector(Arrays.asList("row","cq1","cq2"),Arrays.asList(TypeInfoFactory.stringTypeInfo,TypeInfoFactory.stringTypeInfo,TypeInfoFactory.stringTypeInfo),serDeParams.getSeparators(),serDeParams.getNullSequence(),serDeParams.isLastColumnTakesRest(),serDeParams.isEscaped(),serDeParams.getEscapeChar());
  LazyStruct struct=(LazyStruct)LazyFactory.createLazyObject(structOI);
  ByteArrayRef bytes=new ByteArrayRef();
  bytes.setData("row value1 value2".getBytes());
  struct.init(bytes,0,bytes.getData().length);
  Mutation m=serializer.serialize(struct,structOI);
  recordWriter.write(new Text(table),m);
  recordWriter.close(null);
  // Generic arguments restored: the original "Iterator>" did not compile.
  Iterator<Entry<Key,Value>> iter=conn.createScanner(table,auths).iterator();
  Assert.assertTrue("Iterator did not have an element as expected",iter.hasNext());
  Entry<Key,Value> entry=iter.next();
  Key k=entry.getKey();
  Value v=entry.getValue();
  Assert.assertEquals("row",k.getRow().toString());
  Assert.assertEquals("cf",k.getColumnFamily().toString());
  Assert.assertEquals("cq1",k.getColumnQualifier().toString());
  Assert.assertEquals("foo",k.getColumnVisibility().toString());
  Assert.assertEquals("value1",new String(v.get()));
  Assert.assertTrue("Iterator did not have an element as expected",iter.hasNext());
  entry=iter.next();
  k=entry.getKey();
  v=entry.getValue();
  Assert.assertEquals("row",k.getRow().toString());
  Assert.assertEquals("cf",k.getColumnFamily().toString());
  Assert.assertEquals("cq2",k.getColumnQualifier().toString());
  Assert.assertEquals("foo",k.getColumnVisibility().toString());
  Assert.assertEquals("value2",new String(v.get()));
  Assert.assertFalse("Iterator unexpectedly had more data",iter.hasNext());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Serializes a Hive struct containing a map column (mapped to cf:*) through
 * HiveAccumuloTableOutputFormat against a mock Accumulo instance, then scans
 * the table and verifies each map entry became its own key/value pair carrying
 * the default visibility label.
 */
@Test
public void testWriteMap() throws Exception {
  Instance inst = new MockInstance(test.getMethodName());
  Connector conn = inst.getConnector("root", new PasswordToken(""));
  HiveAccumuloTableOutputFormat outputFormat = new HiveAccumuloTableOutputFormat();
  String table = test.getMethodName();
  conn.tableOperations().create(table);

  // Point the output format at the mock instance.
  JobConf conf = new JobConf();
  conf.set(AccumuloConnectionParameters.INSTANCE_NAME, inst.getInstanceName());
  conf.set(AccumuloConnectionParameters.USER_NAME, "root");
  conf.set(AccumuloConnectionParameters.USER_PASS, "");
  conf.setBoolean(AccumuloConnectionParameters.USE_MOCK_INSTANCE, true);
  conf.set(AccumuloConnectionParameters.TABLE_NAME, test.getMethodName());

  FileSystem local = FileSystem.getLocal(conf);
  outputFormat.checkOutputSpecs(local, conf);
  RecordWriter recordWriter = outputFormat.getRecordWriter(local, conf, null, null);

  // Table schema: a string row id plus a map column fanned out into cf:*.
  List<String> names = Arrays.asList("row", "col1");
  List types = Arrays.asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo);
  Properties tableProperties = new Properties();
  tableProperties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowID,cf:*");
  tableProperties.setProperty(serdeConstants.FIELD_DELIM, " ");
  tableProperties.setProperty(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(names));
  tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(types));
  AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
      tableProperties, AccumuloSerDe.class.getSimpleName());
  LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
  AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams,
      accumuloSerDeParams.getColumnMappings(), AccumuloSerDeParameters.DEFAULT_VISIBILITY_LABEL,
      accumuloSerDeParams.getRowIdFactory());

  // Lazy object inspectors for a struct of (string row, map<string,string> data);
  // map entries are ','-separated, key/value split on ':'.
  TypeInfo stringTypeInfo = TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME);
  LazyStringObjectInspector stringOI = (LazyStringObjectInspector) LazyFactory
      .createLazyObjectInspector(stringTypeInfo, new byte[]{0}, 0, serDeParams.getNullSequence(),
          serDeParams.isEscaped(), serDeParams.getEscapeChar());
  LazyMapObjectInspector mapOI = LazyObjectInspectorFactory.getLazySimpleMapObjectInspector(
      stringOI, stringOI, (byte) ',', (byte) ':', serDeParams.getNullSequence(),
      serDeParams.isEscaped(), serDeParams.getEscapeChar());
  LazySimpleStructObjectInspector structOI = (LazySimpleStructObjectInspector)
      LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(Arrays.asList("row", "data"),
          Arrays.asList(stringOI, mapOI), (byte) ' ', serDeParams.getNullSequence(),
          serDeParams.isLastColumnTakesRest(), serDeParams.isEscaped(),
          serDeParams.getEscapeChar());

  // Row "row" with map entries cq1->value1, cq2->value2.
  LazyStruct struct = (LazyStruct) LazyFactory.createLazyObject(structOI);
  ByteArrayRef bytes = new ByteArrayRef();
  bytes.setData("row cq1:value1,cq2:value2".getBytes());
  struct.init(bytes, 0, bytes.getData().length);
  Mutation m = serializer.serialize(struct, structOI);
  recordWriter.write(new Text(table), m);
  recordWriter.close(null);

  // Scan back: expect exactly two entries, one per map key, in qualifier order.
  Iterator<Entry<Key,Value>> iter = conn.createScanner(table, new Authorizations()).iterator();
  Assert.assertTrue("Iterator did not have an element as expected", iter.hasNext());
  Entry<Key,Value> entry = iter.next();
  Key k = entry.getKey();
  Value v = entry.getValue();
  Assert.assertEquals("row", k.getRow().toString());
  Assert.assertEquals("cf", k.getColumnFamily().toString());
  Assert.assertEquals("cq1", k.getColumnQualifier().toString());
  Assert.assertEquals(AccumuloSerDeParameters.DEFAULT_VISIBILITY_LABEL, k.getColumnVisibilityParsed());
  Assert.assertEquals("value1", new String(v.get()));
  Assert.assertTrue("Iterator did not have an element as expected", iter.hasNext());
  entry = iter.next();
  k = entry.getKey();
  v = entry.getValue();
  Assert.assertEquals("row", k.getRow().toString());
  Assert.assertEquals("cf", k.getColumnFamily().toString());
  Assert.assertEquals("cq2", k.getColumnQualifier().toString());
  Assert.assertEquals(AccumuloSerDeParameters.DEFAULT_VISIBILITY_LABEL, k.getColumnVisibilityParsed());
  Assert.assertEquals("value2", new String(v.get()));
  Assert.assertFalse("Iterator unexpectedly had more data", iter.hasNext());
}
Class: org.apache.hadoop.hive.accumulo.mr.TestHiveAccumuloTypes APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Round-trips one Accumulo cell per Hive primitive type, all written with the
 * UTF8 (string) encoding, through HiveAccumuloTableInputFormat against a mock
 * instance, then verifies each value deserializes correctly via the matching
 * Lazy object inspector.
 */
@Test
public void testUtf8Types() throws Exception {
  final String tableName = test.getMethodName(), user = "root", pass = "";
  MockInstance mockInstance = new MockInstance(test.getMethodName());
  Connector conn = mockInstance.getConnector(user, new PasswordToken(pass));
  HiveAccumuloTableInputFormat inputformat = new HiveAccumuloTableInputFormat();

  // Input-format configuration: one Hive column per primitive type, all mapped
  // into the "cf" family with the type name as qualifier.
  JobConf conf = new JobConf();
  conf.set(AccumuloSerDeParameters.TABLE_NAME, tableName);
  conf.set(AccumuloSerDeParameters.USE_MOCK_INSTANCE, "true");
  conf.set(AccumuloSerDeParameters.INSTANCE_NAME, test.getMethodName());
  conf.set(AccumuloSerDeParameters.USER_NAME, user);
  conf.set(AccumuloSerDeParameters.USER_PASS, pass);
  conf.set(AccumuloSerDeParameters.ZOOKEEPERS, "localhost:2181");
  conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, AccumuloHiveConstants.ROWID + ",cf:string,cf:boolean,cf:tinyint,cf:smallint,cf:int,cf:bigint" + ",cf:float,cf:double,cf:decimal,cf:date,cf:timestamp,cf:char,cf:varchar");
  conf.set(serdeConstants.LIST_COLUMNS, "string,string,boolean,tinyint,smallint,int,bigint,float,double,decimal,date,timestamp,char(4),varchar(7)");
  conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,string,boolean,tinyint,smallint,int,bigint,float,double,decimal,date,timestamp,char(4),varchar(7)");

  conn.tableOperations().create(tableName);
  BatchWriterConfig writerConf = new BatchWriterConfig();
  BatchWriter writer = conn.createBatchWriter(tableName, writerConf);
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  String cf = "cf";
  byte[] cfBytes = cf.getBytes();
  ByteArrayRef byteRef = new ByteArrayRef();
  Mutation m = new Mutation("row1");

  // Write one UTF8-encoded cell per type; baos is reset and reused per column.
  // string
  String stringValue = "string";
  baos.reset();
  JavaStringObjectInspector stringOI = (JavaStringObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME));
  LazyUtils.writePrimitiveUTF8(baos, stringOI.create(stringValue), stringOI, false, (byte) 0, null);
  m.put(cfBytes, "string".getBytes(), baos.toByteArray());
  // boolean
  boolean booleanValue = true;
  baos.reset();
  JavaBooleanObjectInspector booleanOI = (JavaBooleanObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BOOLEAN_TYPE_NAME));
  LazyUtils.writePrimitiveUTF8(baos, booleanOI.create(booleanValue), booleanOI, false, (byte) 0, null);
  m.put(cfBytes, "boolean".getBytes(), baos.toByteArray());
  // tinyint (autoboxed primitives are accepted by writePrimitiveUTF8)
  byte tinyintValue = -127;
  baos.reset();
  JavaByteObjectInspector byteOI = (JavaByteObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TINYINT_TYPE_NAME));
  LazyUtils.writePrimitiveUTF8(baos, tinyintValue, byteOI, false, (byte) 0, null);
  m.put(cfBytes, "tinyint".getBytes(), baos.toByteArray());
  // smallint
  short smallintValue = Short.MAX_VALUE;
  baos.reset();
  JavaShortObjectInspector shortOI = (JavaShortObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.SMALLINT_TYPE_NAME));
  LazyUtils.writePrimitiveUTF8(baos, smallintValue, shortOI, false, (byte) 0, null);
  m.put(cfBytes, "smallint".getBytes(), baos.toByteArray());
  // int
  int intValue = Integer.MAX_VALUE;
  baos.reset();
  JavaIntObjectInspector intOI = (JavaIntObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
  LazyUtils.writePrimitiveUTF8(baos, intValue, intOI, false, (byte) 0, null);
  m.put(cfBytes, "int".getBytes(), baos.toByteArray());
  // bigint
  long bigintValue = Long.MAX_VALUE;
  baos.reset();
  JavaLongObjectInspector longOI = (JavaLongObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BIGINT_TYPE_NAME));
  LazyUtils.writePrimitiveUTF8(baos, bigintValue, longOI, false, (byte) 0, null);
  m.put(cfBytes, "bigint".getBytes(), baos.toByteArray());
  // float
  float floatValue = Float.MAX_VALUE;
  baos.reset();
  JavaFloatObjectInspector floatOI = (JavaFloatObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.FLOAT_TYPE_NAME));
  LazyUtils.writePrimitiveUTF8(baos, floatValue, floatOI, false, (byte) 0, null);
  m.put(cfBytes, "float".getBytes(), baos.toByteArray());
  // double
  double doubleValue = Double.MAX_VALUE;
  baos.reset();
  JavaDoubleObjectInspector doubleOI = (JavaDoubleObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.DOUBLE_TYPE_NAME));
  LazyUtils.writePrimitiveUTF8(baos, doubleValue, doubleOI, false, (byte) 0, null);
  m.put(cfBytes, "double".getBytes(), baos.toByteArray());
  // decimal(5,2)
  HiveDecimal decimalValue = HiveDecimal.create("1.23");
  baos.reset();
  JavaHiveDecimalObjectInspector decimalOI = (JavaHiveDecimalObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(new DecimalTypeInfo(5, 2));
  LazyUtils.writePrimitiveUTF8(baos, decimalOI.create(decimalValue), decimalOI, false, (byte) 0, null);
  m.put(cfBytes, "decimal".getBytes(), baos.toByteArray());
  // date (normalized through DateWritable so the stored value matches the read-back)
  Date now = new Date(System.currentTimeMillis());
  DateWritable dateWritable = new DateWritable(now);
  Date dateValue = dateWritable.get();
  baos.reset();
  JavaDateObjectInspector dateOI = (JavaDateObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.DATE_TYPE_NAME));
  LazyUtils.writePrimitiveUTF8(baos, dateOI.create(dateValue), dateOI, false, (byte) 0, null);
  m.put(cfBytes, "date".getBytes(), baos.toByteArray());
  // timestamp
  Timestamp timestampValue = new Timestamp(now.getTime());
  baos.reset();
  JavaTimestampObjectInspector timestampOI = (JavaTimestampObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TIMESTAMP_TYPE_NAME));
  LazyUtils.writePrimitiveUTF8(baos, timestampOI.create(timestampValue), timestampOI, false, (byte) 0, null);
  m.put(cfBytes, "timestamp".getBytes(), baos.toByteArray());
  // char(4)
  baos.reset();
  HiveChar charValue = new HiveChar("char", 4);
  JavaHiveCharObjectInspector charOI = (JavaHiveCharObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(new CharTypeInfo(4));
  LazyUtils.writePrimitiveUTF8(baos, charOI.create(charValue), charOI, false, (byte) 0, null);
  m.put(cfBytes, "char".getBytes(), baos.toByteArray());
  // varchar(7)
  baos.reset();
  HiveVarchar varcharValue = new HiveVarchar("varchar", 7);
  JavaHiveVarcharObjectInspector varcharOI = (JavaHiveVarcharObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(new VarcharTypeInfo(7));
  LazyUtils.writePrimitiveUTF8(baos, varcharOI.create(varcharValue), varcharOI, false, (byte) 0, null);
  m.put(cfBytes, "varchar".getBytes(), baos.toByteArray());

  writer.addMutation(m);
  writer.close();

  // Debugging aid: dump what actually landed in the mock table.
  for (Entry e : conn.createScanner(tableName, new Authorizations())) {
    System.out.println(e);
  }

  // Read the row back through the input format.
  FileInputFormat.addInputPath(conf, new Path("unused"));
  InputSplit[] splits = inputformat.getSplits(conf, 0);
  assertEquals(1, splits.length);
  RecordReader<Text,AccumuloHiveRow> reader = inputformat.getRecordReader(splits[0], conf, null);
  Text key = reader.createKey();
  AccumuloHiveRow value = reader.createValue();
  reader.next(key, value);
  Assert.assertEquals(13, value.getTuples().size());

  // Verify every column deserializes to the value written above.
  Text cfText = new Text(cf), cqHolder = new Text();
  // string
  cqHolder.set("string");
  byte[] valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyStringObjectInspector lazyStringOI = LazyPrimitiveObjectInspectorFactory.getLazyStringObjectInspector(false, (byte) 0);
  LazyString lazyString = (LazyString) LazyFactory.createLazyObject(lazyStringOI);
  lazyString.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(new Text(stringValue), lazyString.getWritableObject());
  // boolean
  cqHolder.set("boolean");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyBooleanObjectInspector lazyBooleanOI = (LazyBooleanObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BOOLEAN_TYPE_NAME));
  LazyBoolean lazyBoolean = (LazyBoolean) LazyFactory.createLazyObject(lazyBooleanOI);
  lazyBoolean.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(booleanValue, lazyBoolean.getWritableObject().get());
  // tinyint
  cqHolder.set("tinyint");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyByteObjectInspector lazyByteOI = (LazyByteObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TINYINT_TYPE_NAME));
  LazyByte lazyByte = (LazyByte) LazyFactory.createLazyObject(lazyByteOI);
  lazyByte.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(tinyintValue, lazyByte.getWritableObject().get());
  // smallint
  cqHolder.set("smallint");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyShortObjectInspector lazyShortOI = (LazyShortObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.SMALLINT_TYPE_NAME));
  LazyShort lazyShort = (LazyShort) LazyFactory.createLazyObject(lazyShortOI);
  lazyShort.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(smallintValue, lazyShort.getWritableObject().get());
  // int
  cqHolder.set("int");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyIntObjectInspector lazyIntOI = (LazyIntObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
  LazyInteger lazyInt = (LazyInteger) LazyFactory.createLazyObject(lazyIntOI);
  lazyInt.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(intValue, lazyInt.getWritableObject().get());
  // bigint
  cqHolder.set("bigint");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyLongObjectInspector lazyLongOI = (LazyLongObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BIGINT_TYPE_NAME));
  LazyLong lazyLong = (LazyLong) LazyFactory.createLazyObject(lazyLongOI);
  lazyLong.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(bigintValue, lazyLong.getWritableObject().get());
  // float
  cqHolder.set("float");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyFloatObjectInspector lazyFloatOI = (LazyFloatObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.FLOAT_TYPE_NAME));
  LazyFloat lazyFloat = (LazyFloat) LazyFactory.createLazyObject(lazyFloatOI);
  lazyFloat.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(floatValue, lazyFloat.getWritableObject().get(), 0);
  // double
  cqHolder.set("double");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyDoubleObjectInspector lazyDoubleOI = (LazyDoubleObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.DOUBLE_TYPE_NAME));
  LazyDouble lazyDouble = (LazyDouble) LazyFactory.createLazyObject(lazyDoubleOI);
  lazyDouble.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(doubleValue, lazyDouble.getWritableObject().get(), 0);
  // decimal
  cqHolder.set("decimal");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyHiveDecimalObjectInspector lazyDecimalOI = (LazyHiveDecimalObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(new DecimalTypeInfo(5, 2));
  LazyHiveDecimal lazyDecimal = (LazyHiveDecimal) LazyFactory.createLazyObject(lazyDecimalOI);
  lazyDecimal.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(decimalValue, lazyDecimal.getWritableObject().getHiveDecimal());
  // date
  cqHolder.set("date");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyDateObjectInspector lazyDateOI = (LazyDateObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.DATE_TYPE_NAME));
  LazyDate lazyDate = (LazyDate) LazyFactory.createLazyObject(lazyDateOI);
  lazyDate.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(dateValue, lazyDate.getWritableObject().get());
  // timestamp
  cqHolder.set("timestamp");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyTimestampObjectInspector lazyTimestampOI = (LazyTimestampObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TIMESTAMP_TYPE_NAME));
  LazyTimestamp lazyTimestamp = (LazyTimestamp) LazyFactory.createLazyObject(lazyTimestampOI);
  lazyTimestamp.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(timestampValue, lazyTimestamp.getWritableObject().getTimestamp());
  // char
  cqHolder.set("char");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyHiveCharObjectInspector lazyCharOI = (LazyHiveCharObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(new CharTypeInfo(4));
  LazyHiveChar lazyChar = (LazyHiveChar) LazyFactory.createLazyObject(lazyCharOI);
  lazyChar.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(charValue, lazyChar.getWritableObject().getHiveChar());
  // varchar (compared via toString; HiveVarchar equality is length-sensitive)
  cqHolder.set("varchar");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyHiveVarcharObjectInspector lazyVarcharOI = (LazyHiveVarcharObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(new VarcharTypeInfo(7));
  LazyHiveVarchar lazyVarchar = (LazyHiveVarchar) LazyFactory.createLazyObject(lazyVarcharOI);
  lazyVarchar.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(varcharValue.toString(), lazyVarchar.getWritableObject().getHiveVarchar().toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Round-trips one Accumulo cell per Hive primitive type using the binary
 * storage encoding (numeric types written via writePrimitive / Writable
 * serialization; string-like types remain UTF8), then verifies each value
 * deserializes correctly via the binary Lazy classes or Writable readFields.
 */
@Test
public void testBinaryTypes() throws Exception {
  final String tableName = test.getMethodName(), user = "root", pass = "";
  MockInstance mockInstance = new MockInstance(test.getMethodName());
  Connector conn = mockInstance.getConnector(user, new PasswordToken(pass));
  HiveAccumuloTableInputFormat inputformat = new HiveAccumuloTableInputFormat();

  // Input-format configuration; DEFAULT_STORAGE_TYPE=binary switches the
  // value encoding for all mapped columns.
  JobConf conf = new JobConf();
  conf.set(AccumuloSerDeParameters.TABLE_NAME, tableName);
  conf.set(AccumuloSerDeParameters.USE_MOCK_INSTANCE, "true");
  conf.set(AccumuloSerDeParameters.INSTANCE_NAME, test.getMethodName());
  conf.set(AccumuloSerDeParameters.USER_NAME, user);
  conf.set(AccumuloSerDeParameters.USER_PASS, pass);
  conf.set(AccumuloSerDeParameters.ZOOKEEPERS, "localhost:2181");
  conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, AccumuloHiveConstants.ROWID + ",cf:string,cf:boolean,cf:tinyint,cf:smallint,cf:int,cf:bigint" + ",cf:float,cf:double,cf:decimal,cf:date,cf:timestamp,cf:char,cf:varchar");
  conf.set(serdeConstants.LIST_COLUMNS, "string,string,boolean,tinyint,smallint,int,bigint,float,double,decimal,date,timestamp,char(4),varchar(7)");
  conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,string,boolean,tinyint,smallint,int,bigint,float,double,decimal,date,timestamp,char(4),varchar(7)");
  conf.set(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE, "binary");

  conn.tableOperations().create(tableName);
  BatchWriterConfig writerConf = new BatchWriterConfig();
  BatchWriter writer = conn.createBatchWriter(tableName, writerConf);

  // `out` wraps `baos`, so Writable.write(out) lands in baos for toByteArray().
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataOutputStream out = new DataOutputStream(baos);
  String cf = "cf";
  byte[] cfBytes = cf.getBytes();
  Mutation m = new Mutation("row1");

  // string: identical bytes in UTF8 and binary encodings.
  String stringValue = "string";
  JavaStringObjectInspector stringOI = (JavaStringObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME));
  LazyUtils.writePrimitiveUTF8(baos, stringOI.create(stringValue), stringOI, false, (byte) 0, null);
  m.put(cfBytes, "string".getBytes(), baos.toByteArray());
  // boolean
  boolean booleanValue = true;
  baos.reset();
  JavaBooleanObjectInspector booleanOI = (JavaBooleanObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BOOLEAN_TYPE_NAME));
  LazyUtils.writePrimitive(baos, booleanOI.create(booleanValue), booleanOI);
  m.put(cfBytes, "boolean".getBytes(), baos.toByteArray());
  // tinyint
  byte tinyintValue = -127;
  baos.reset();
  JavaByteObjectInspector byteOI = (JavaByteObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TINYINT_TYPE_NAME));
  LazyUtils.writePrimitive(baos, tinyintValue, byteOI);
  m.put(cfBytes, "tinyint".getBytes(), baos.toByteArray());
  // smallint
  short smallintValue = Short.MAX_VALUE;
  baos.reset();
  JavaShortObjectInspector shortOI = (JavaShortObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.SMALLINT_TYPE_NAME));
  LazyUtils.writePrimitive(baos, smallintValue, shortOI);
  m.put(cfBytes, "smallint".getBytes(), baos.toByteArray());
  // int
  int intValue = Integer.MAX_VALUE;
  baos.reset();
  JavaIntObjectInspector intOI = (JavaIntObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
  LazyUtils.writePrimitive(baos, intValue, intOI);
  m.put(cfBytes, "int".getBytes(), baos.toByteArray());
  // bigint
  long bigintValue = Long.MAX_VALUE;
  baos.reset();
  JavaLongObjectInspector longOI = (JavaLongObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BIGINT_TYPE_NAME));
  LazyUtils.writePrimitive(baos, bigintValue, longOI);
  m.put(cfBytes, "bigint".getBytes(), baos.toByteArray());
  // float
  float floatValue = Float.MAX_VALUE;
  baos.reset();
  JavaFloatObjectInspector floatOI = (JavaFloatObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.FLOAT_TYPE_NAME));
  LazyUtils.writePrimitive(baos, floatValue, floatOI);
  m.put(cfBytes, "float".getBytes(), baos.toByteArray());
  // double
  double doubleValue = Double.MAX_VALUE;
  baos.reset();
  JavaDoubleObjectInspector doubleOI = (JavaDoubleObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.DOUBLE_TYPE_NAME));
  LazyUtils.writePrimitive(baos, doubleValue, doubleOI);
  m.put(cfBytes, "double".getBytes(), baos.toByteArray());
  // decimal: serialized via the Writable's own binary form.
  baos.reset();
  HiveDecimal decimalValue = HiveDecimal.create(65536L);
  HiveDecimalWritable decimalWritable = new HiveDecimalWritable(decimalValue);
  decimalWritable.write(out);
  m.put(cfBytes, "decimal".getBytes(), baos.toByteArray());
  // date: serialized via DateWritable's binary form.
  baos.reset();
  Date now = new Date(System.currentTimeMillis());
  DateWritable dateWritable = new DateWritable(now);
  Date dateValue = dateWritable.get();
  dateWritable.write(out);
  m.put(cfBytes, "date".getBytes(), baos.toByteArray());
  // timestamp: serialized through a ByteStream.Output instead of baos.
  baos.reset();
  Timestamp timestampValue = new Timestamp(now.getTime());
  ByteStream.Output output = new ByteStream.Output();
  TimestampWritable timestampWritable = new TimestampWritable(new Timestamp(now.getTime()));
  timestampWritable.write(output);
  output.close();
  m.put(cfBytes, "timestamp".getBytes(), output.toByteArray());
  // char(4): string-like, stays UTF8 even under binary storage.
  baos.reset();
  HiveChar charValue = new HiveChar("char", 4);
  JavaHiveCharObjectInspector charOI = (JavaHiveCharObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(new CharTypeInfo(4));
  LazyUtils.writePrimitiveUTF8(baos, charOI.create(charValue), charOI, false, (byte) 0, null);
  m.put(cfBytes, "char".getBytes(), baos.toByteArray());
  // varchar(7): string-like, stays UTF8 as well.
  baos.reset();
  HiveVarchar varcharValue = new HiveVarchar("varchar", 7);
  JavaHiveVarcharObjectInspector varcharOI = (JavaHiveVarcharObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(new VarcharTypeInfo(7));
  LazyUtils.writePrimitiveUTF8(baos, varcharOI.create(varcharValue), varcharOI, false, (byte) 0, null);
  m.put(cfBytes, "varchar".getBytes(), baos.toByteArray());

  writer.addMutation(m);
  writer.close();

  // Debugging aid: dump what actually landed in the mock table.
  for (Entry e : conn.createScanner(tableName, new Authorizations())) {
    System.out.println(e);
  }

  // Read the row back through the input format.
  FileInputFormat.addInputPath(conf, new Path("unused"));
  InputSplit[] splits = inputformat.getSplits(conf, 0);
  assertEquals(1, splits.length);
  RecordReader<Text,AccumuloHiveRow> reader = inputformat.getRecordReader(splits[0], conf, null);
  Text key = reader.createKey();
  AccumuloHiveRow value = reader.createValue();
  reader.next(key, value);
  Assert.assertEquals(13, value.getTuples().size());

  // Verify every column deserializes to the value written above.
  ByteArrayRef byteRef = new ByteArrayRef();
  Text cfText = new Text(cf), cqHolder = new Text();
  // string
  cqHolder.set("string");
  byte[] valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyStringObjectInspector lazyStringOI = LazyPrimitiveObjectInspectorFactory.getLazyStringObjectInspector(false, (byte) 0);
  LazyString lazyString = (LazyString) LazyFactory.createLazyObject(lazyStringOI);
  lazyString.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(stringValue, lazyString.getWritableObject().toString());
  // boolean (binary lazy class)
  cqHolder.set("boolean");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyBooleanObjectInspector lazyBooleanOI = (LazyBooleanObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BOOLEAN_TYPE_NAME));
  LazyBoolean lazyBoolean = (LazyBoolean) LazyFactory.createLazyPrimitiveBinaryClass(lazyBooleanOI);
  lazyBoolean.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(booleanValue, lazyBoolean.getWritableObject().get());
  // tinyint
  cqHolder.set("tinyint");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyByteObjectInspector lazyByteOI = (LazyByteObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TINYINT_TYPE_NAME));
  LazyByte lazyByte = (LazyByte) LazyFactory.createLazyPrimitiveBinaryClass(lazyByteOI);
  lazyByte.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(tinyintValue, lazyByte.getWritableObject().get());
  // smallint
  cqHolder.set("smallint");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyShortObjectInspector lazyShortOI = (LazyShortObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.SMALLINT_TYPE_NAME));
  LazyShort lazyShort = (LazyShort) LazyFactory.createLazyPrimitiveBinaryClass(lazyShortOI);
  lazyShort.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(smallintValue, lazyShort.getWritableObject().get());
  // int
  cqHolder.set("int");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyIntObjectInspector lazyIntOI = (LazyIntObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
  LazyInteger lazyInt = (LazyInteger) LazyFactory.createLazyPrimitiveBinaryClass(lazyIntOI);
  lazyInt.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(intValue, lazyInt.getWritableObject().get());
  // bigint
  cqHolder.set("bigint");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyLongObjectInspector lazyLongOI = (LazyLongObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BIGINT_TYPE_NAME));
  LazyLong lazyLong = (LazyLong) LazyFactory.createLazyPrimitiveBinaryClass(lazyLongOI);
  lazyLong.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(bigintValue, lazyLong.getWritableObject().get());
  // float
  cqHolder.set("float");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyFloatObjectInspector lazyFloatOI = (LazyFloatObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.FLOAT_TYPE_NAME));
  LazyFloat lazyFloat = (LazyFloat) LazyFactory.createLazyPrimitiveBinaryClass(lazyFloatOI);
  lazyFloat.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(floatValue, lazyFloat.getWritableObject().get(), 0);
  // double
  cqHolder.set("double");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyDoubleObjectInspector lazyDoubleOI = (LazyDoubleObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.DOUBLE_TYPE_NAME));
  LazyDouble lazyDouble = (LazyDouble) LazyFactory.createLazyPrimitiveBinaryClass(lazyDoubleOI);
  lazyDouble.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(doubleValue, lazyDouble.getWritableObject().get(), 0);
  // decimal: read back through Writable.readFields.
  cqHolder.set("decimal");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  ByteArrayInputStream bais = new ByteArrayInputStream(valueBytes);
  DataInputStream in = new DataInputStream(bais);
  decimalWritable.readFields(in);
  Assert.assertEquals(decimalValue, decimalWritable.getHiveDecimal());
  // date
  cqHolder.set("date");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  bais = new ByteArrayInputStream(valueBytes);
  in = new DataInputStream(bais);
  dateWritable.readFields(in);
  Assert.assertEquals(dateValue, dateWritable.get());
  // timestamp
  cqHolder.set("timestamp");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  bais = new ByteArrayInputStream(valueBytes);
  in = new DataInputStream(bais);
  timestampWritable.readFields(in);
  Assert.assertEquals(timestampValue, timestampWritable.getTimestamp());
  // char
  cqHolder.set("char");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyHiveCharObjectInspector lazyCharOI = (LazyHiveCharObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(new CharTypeInfo(4));
  LazyHiveChar lazyChar = (LazyHiveChar) LazyFactory.createLazyObject(lazyCharOI);
  lazyChar.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(charValue, lazyChar.getWritableObject().getHiveChar());
  // varchar (compared via toString; HiveVarchar equality is length-sensitive)
  cqHolder.set("varchar");
  valueBytes = value.getValue(cfText, cqHolder);
  Assert.assertNotNull(valueBytes);
  byteRef.setData(valueBytes);
  LazyHiveVarcharObjectInspector lazyVarcharOI = (LazyHiveVarcharObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(new VarcharTypeInfo(7));
  LazyHiveVarchar lazyVarchar = (LazyHiveVarchar) LazyFactory.createLazyObject(lazyVarcharOI);
  lazyVarchar.init(byteRef, 0, valueBytes.length);
  Assert.assertEquals(varcharValue.toString(), lazyVarchar.getWritableObject().getHiveVarchar().toString());
}
Class: org.apache.hadoop.hive.accumulo.predicate.TestAccumuloPredicateHandler InternalCallVerifier EqualityVerifier
/**
 * When the range generator yields no ranges for the rowID column, getRanges()
 * must surface that empty list to the caller unchanged.
 */
@Test
public void testEmptyListRangeGeneratorOutput() throws SerDeException {
  String rowIdColumn = "rid";
  ExprNodeDesc rootExpr = Mockito.mock(ExprNodeDesc.class);
  AccumuloPredicateHandler partialMock = Mockito.mock(AccumuloPredicateHandler.class);

  // Stub the collaborators the real getRanges() consults: the pushed-down
  // expression lookup and the per-expression range generation.
  Mockito.when(partialMock.getExpression(conf)).thenReturn(rootExpr);
  Mockito.when(partialMock.generateRanges(columnMapper, rowIdColumn, rootExpr)).thenReturn(Collections.emptyList());
  Mockito.when(partialMock.getRanges(conf, columnMapper)).thenCallRealMethod();

  Assert.assertEquals(Collections.emptyList(), partialMock.getRanges(conf, columnMapper));
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * A predicate "100 &lt; key" over a binary-encoded (#b) rowID column must
 * collapse to a single lower-bounded Range whose (exclusive) start key is the
 * binary serialization of 100.
 */
@Test
public void testBinaryRangeGeneration() throws Exception {
  List<String> columnNames = Arrays.asList("key", "column");
  List columnTypes = Arrays.asList(TypeInfoFactory.intTypeInfo, TypeInfoFactory.stringTypeInfo);
  conf.set(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columnNames));
  conf.set(serdeConstants.LIST_COLUMN_TYPES, "int,string");

  // rowID carries the binary encoding suffix (#b).
  String columnMappingStr = ":rowID#b,cf:f1";
  conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, columnMappingStr);
  columnMapper = new ColumnMapper(columnMappingStr, ColumnEncoding.STRING.getName(), columnNames, columnTypes);

  // Binary serialization of the constant, used below to build the expected Range bound.
  int intValue = 100;
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  JavaIntObjectInspector intOI = (JavaIntObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
  LazyUtils.writePrimitive(baos, intValue, intOI);

  // Children ordered (constant, column): OPLessThan(100, key) == "100 < key".
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "key", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, intValue);
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(constant);
  children.add(column);
  ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPLessThan(), children);
  assertNotNull(node);
  String filterExpr = SerializationUtilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);

  List<Range> ranges = handler.getRanges(conf, columnMapper);
  Assert.assertEquals(1, ranges.size());
  // Exclusive start at binary(100), unbounded end: everything after key=100.
  Assert.assertEquals(new Range(new Text(baos.toByteArray()), false, null, false), ranges.get(0));
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * A simple equality predicate on the rowID column should surface exactly one
 * pushed-down search condition.
 */
@Test public void testGetRowIDSearchCondition(){
  ExprNodeDesc column=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
  ExprNodeDesc constant=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"hi");
  List children=Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeGenericFuncDesc node=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqual(),children);
  assertNotNull(node);
  String filterExpr=SerializationUtilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR,filterExpr);
  List sConditions=handler.getSearchConditions(conf);
  // Expected value first (JUnit convention) for a meaningful failure message.
  assertEquals(1,sConditions.size());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * rid &lt; 'aaa' should produce the single range (-inf, 'aaa'): start-key
 * inclusive by Accumulo convention, end-key exclusive at the constant.
 */
@Test public void rangeLessThan() throws SerDeException {
  ExprNodeDesc column=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
  ExprNodeDesc constant=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"aaa");
  List children=Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeGenericFuncDesc node=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPLessThan(),children);
  assertNotNull(node);
  String filterExpr=SerializationUtilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR,filterExpr);
  Collection<Range> ranges=handler.getRanges(conf,columnMapper);
  // Expected value first (JUnit convention).
  assertEquals(1,ranges.size());
  Range range=ranges.iterator().next();
  assertTrue(range.isStartKeyInclusive());
  assertFalse(range.isEndKeyInclusive());
  // 'aaa' itself is excluded by the strict less-than.
  assertFalse(range.contains(new Key(new Text("aaa"))));
  assertTrue(range.afterEndKey(new Key(new Text("ccccc"))));
  assertTrue(range.contains(new Key(new Text("aa"))));
  assertTrue(range.afterEndKey(new Key(new Text("aab"))));
  assertTrue(range.afterEndKey(new Key(new Text("aaa"))));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * rid = 'aaa' should produce the single-row range ['aaa', 'aaa\0'):
 * only the exact key is contained.
 */
@Test public void testRangeEqual() throws SerDeException {
  ExprNodeDesc column=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
  ExprNodeDesc constant=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"aaa");
  List children=Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeGenericFuncDesc node=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqual(),children);
  assertNotNull(node);
  String filterExpr=SerializationUtilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR,filterExpr);
  Collection<Range> ranges=handler.getRanges(conf,columnMapper);
  // Expected value first (JUnit convention).
  assertEquals(1,ranges.size());
  Range range=ranges.iterator().next();
  assertTrue(range.isStartKeyInclusive());
  assertFalse(range.isEndKeyInclusive());
  assertTrue(range.contains(new Key(new Text("aaa"))));
  assertTrue(range.afterEndKey(new Key(new Text("aab"))));
  assertTrue(range.beforeStartKey(new Key(new Text("aa"))));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Conjunction of rid &gt;= 'aaa' and rid &lt; 'bbb' should collapse to the
 * single range ['aaa', 'bbb').
 */
@Test public void testMultipleRanges() throws SerDeException {
  // rid >= 'aaa'
  ExprNodeDesc ridColumn=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
  ExprNodeDesc lowerBound=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"aaa");
  List geChildren=Lists.newArrayList();
  geChildren.add(ridColumn);
  geChildren.add(lowerBound);
  ExprNodeDesc geNode=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqualOrGreaterThan(),geChildren);
  assertNotNull(geNode);
  // rid < 'bbb'
  ExprNodeDesc ridColumnAgain=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
  ExprNodeDesc upperBound=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"bbb");
  List ltChildren=Lists.newArrayList();
  ltChildren.add(ridColumnAgain);
  ltChildren.add(upperBound);
  ExprNodeDesc ltNode=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPLessThan(),ltChildren);
  assertNotNull(ltNode);
  List conjuncts=Lists.newArrayList();
  conjuncts.add(geNode);
  conjuncts.add(ltNode);
  ExprNodeGenericFuncDesc andNode=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPAnd(),conjuncts);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR,SerializationUtilities.serializeExpression(andNode));
  List<Range> ranges=handler.getRanges(conf,columnMapper);
  assertEquals(1,ranges.size());
  Range onlyRange=ranges.get(0);
  assertEquals(new Range(new Key("aaa"),true,new Key("bbb"),false),onlyRange);
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * An equality predicate on an int column should translate into a
 * PushdownTuple carrying the big-endian int bytes of the constant, the Equal
 * compare-op and the IntCompare primitive comparison.
 */
@Test public void testPushdownTuple() throws SerDeException, NoSuchPrimitiveComparisonException, NoSuchCompareOpException {
  ExprNodeDesc column=new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo,"field1",null,false);
  ExprNodeDesc constant=new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo,5);
  List children=Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeGenericFuncDesc node=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqual(),children);
  assertNotNull(node);
  String filterExpr=SerializationUtilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR,filterExpr);
  List<IndexSearchCondition> sConditions=handler.getSearchConditions(conf);
  // Expected value first (JUnit convention).
  assertEquals(1,sConditions.size());
  IndexSearchCondition sc=sConditions.get(0);
  PushdownTuple tuple=new PushdownTuple(sc,handler.getPrimitiveComparison(sc.getColumnDesc().getTypeString(),sc),handler.getCompareOp(sc.getComparisonOp(),sc));
  byte[] expectedVal=new byte[4];
  ByteBuffer.wrap(expectedVal).putInt(5);
  assertArrayEquals(expectedVal,tuple.getConstVal());
  assertEquals(Equal.class,tuple.getcOpt().getClass());
  assertEquals(IntCompare.class,tuple.getpCompare().getClass());
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * rid &lt;= 'aaa' AND rid &gt; 'bbb' can never match, so the intersection of
 * the generated ranges must be empty.
 */
@Test public void testDisjointRanges() throws SerDeException {
  // rid <= 'aaa'
  ExprNodeDesc ridColumn=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
  ExprNodeDesc upperBound=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"aaa");
  List leChildren=Lists.newArrayList();
  leChildren.add(ridColumn);
  leChildren.add(upperBound);
  ExprNodeDesc leNode=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqualOrLessThan(),leChildren);
  assertNotNull(leNode);
  // rid > 'bbb'
  ExprNodeDesc ridColumnAgain=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
  ExprNodeDesc lowerBound=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"bbb");
  List gtChildren=Lists.newArrayList();
  gtChildren.add(ridColumnAgain);
  gtChildren.add(lowerBound);
  ExprNodeDesc gtNode=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPGreaterThan(),gtChildren);
  assertNotNull(gtNode);
  List conjuncts=Lists.newArrayList();
  conjuncts.add(leNode);
  conjuncts.add(gtNode);
  ExprNodeGenericFuncDesc andNode=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPAnd(),conjuncts);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR,SerializationUtilities.serializeExpression(andNode));
  Collection ranges=handler.getRanges(conf,columnMapper);
  assertEquals(0,ranges.size());
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * String rowID encoding: the filter "100 &lt; key" (i.e. key &gt; 100) on a
 * string-encoded table should generate a range starting just past "100".
 */
@Test public void testRowRangeGeneration() throws SerDeException {
  List names=Arrays.asList("key","column");
  List types=Arrays.asList(TypeInfoFactory.stringTypeInfo,TypeInfoFactory.stringTypeInfo);
  conf.set(serdeConstants.LIST_COLUMNS,Joiner.on(',').join(names));
  conf.set(serdeConstants.LIST_COLUMN_TYPES,"string,string");
  String mapping=":rowID,cf:f1";
  conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS,mapping);
  columnMapper=new ColumnMapper(mapping,ColumnEncoding.STRING.getName(),names,types);
  // Predicate is written with the constant on the left-hand side: 100 < key.
  ExprNodeDesc keyColumn=new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo,"key",null,false);
  ExprNodeDesc hundred=new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo,100);
  List operands=Lists.newArrayList();
  operands.add(hundred);
  operands.add(keyColumn);
  ExprNodeGenericFuncDesc lessThan=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPLessThan(),operands);
  assertNotNull(lessThan);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR,SerializationUtilities.serializeExpression(lessThan));
  List ranges=handler.getRanges(conf,columnMapper);
  Assert.assertEquals(1,ranges.size());
  Assert.assertEquals(new Range(new Text("100"),false,null,false),ranges.get(0));
}
InternalCallVerifier EqualityVerifier
/**
 * A null result from the range generator means "no restriction": getRanges
 * must fall back to a single full-table Range.
 */
@Test public void testNullRangeGeneratorOutput() throws SerDeException {
  AccumuloPredicateHandler stubbedHandler=Mockito.mock(AccumuloPredicateHandler.class);
  ExprNodeDesc filterRoot=Mockito.mock(ExprNodeDesc.class);
  String rowIdColumn="rid";
  Mockito.when(stubbedHandler.getExpression(conf)).thenReturn(filterRoot);
  Mockito.when(stubbedHandler.generateRanges(columnMapper,rowIdColumn,filterRoot)).thenReturn(null);
  Mockito.when(stubbedHandler.getRanges(conf,columnMapper)).thenCallRealMethod();
  Assert.assertEquals(Arrays.asList(new Range()),stubbedHandler.getRanges(conf,columnMapper));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * rid &lt;= 'aaa' should produce the range (-inf, 'aaa']: unlike strict
 * less-than, the constant itself remains contained.
 */
@Test public void rangeLessThanOrEqual() throws SerDeException {
  ExprNodeDesc column=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
  ExprNodeDesc constant=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"aaa");
  List children=Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeGenericFuncDesc node=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqualOrLessThan(),children);
  assertNotNull(node);
  String filterExpr=SerializationUtilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR,filterExpr);
  Collection<Range> ranges=handler.getRanges(conf,columnMapper);
  // Expected value first (JUnit convention).
  assertEquals(1,ranges.size());
  Range range=ranges.iterator().next();
  assertTrue(range.isStartKeyInclusive());
  assertFalse(range.isEndKeyInclusive());
  assertTrue(range.contains(new Key(new Text("aaa"))));
  assertTrue(range.afterEndKey(new Key(new Text("ccccc"))));
  assertTrue(range.contains(new Key(new Text("aa"))));
  assertTrue(range.afterEndKey(new Key(new Text("aab"))));
  // 'aaa' is still inside the range for <=.
  assertFalse(range.afterEndKey(new Key(new Text("aaa"))));
}
UtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * 'IS NOT NULL' has no registered compare-op, so building a PushdownTuple
 * from it must raise a RuntimeException naming the residual predicate.
 */
@Test public void testPushdownComparisonOptNotSupported(){
  try {
    ExprNodeDesc column=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"field1",null,false);
    List children=Lists.newArrayList();
    children.add(column);
    ExprNodeGenericFuncDesc node=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPNotNull(),children);
    assertNotNull(node);
    String filterExpr=SerializationUtilities.serializeExpression(node);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR,filterExpr);
    List<IndexSearchCondition> sConditions=handler.getSearchConditions(conf);
    // Expected value first (JUnit convention).
    assertEquals(1,sConditions.size());
    IndexSearchCondition sc=sConditions.get(0);
    new PushdownTuple(sc,handler.getPrimitiveComparison(sc.getColumnDesc().getTypeString(),sc),handler.getCompareOp(sc.getComparisonOp(),sc));
    fail("Should fail: compare op not registered for index analyzer. Should leave undesirable residual predicate");
  }
  catch ( RuntimeException e) {
    // Expected path: the residual predicate is called out in the message.
    assertTrue(e.getMessage().contains("Unexpected residual predicate: field1 is not null"));
  }
  catch ( Exception e) {
    fail(StringUtils.stringifyException(e));
  }
}
BranchVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Two AND-ed column predicates (field1 &lt;= 'aaa' and field2 &gt; 5) should
 * each produce an IteratorSetting whose options identify the column, the
 * base64-encoded constant, the compare-op class and the primitive-comparison
 * class used by PrimitiveComparisonFilter.
 */
@Test public void testCreateIteratorSettings() throws Exception {
  conf=new JobConf();
  List columnNames=Arrays.asList("field1","field2","rid");
  List columnTypes=Arrays.asList(TypeInfoFactory.stringTypeInfo,TypeInfoFactory.intTypeInfo,TypeInfoFactory.stringTypeInfo);
  conf.set(serdeConstants.LIST_COLUMNS,Joiner.on(',').join(columnNames));
  conf.set(serdeConstants.LIST_COLUMN_TYPES,"string,int,string");
  String columnMappingStr="cf:f1,cf:f2,:rowID";
  conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS,columnMappingStr);
  columnMapper=new ColumnMapper(columnMappingStr,ColumnEncoding.STRING.getName(),columnNames,columnTypes);
  // field1 <= 'aaa'
  ExprNodeDesc column=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"field1",null,false);
  ExprNodeDesc constant=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"aaa");
  List children=Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeDesc node=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqualOrLessThan(),children);
  assertNotNull(node);
  // field2 > 5
  ExprNodeDesc column2=new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo,"field2",null,false);
  ExprNodeDesc constant2=new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo,5);
  List children2=Lists.newArrayList();
  children2.add(column2);
  children2.add(constant2);
  ExprNodeDesc node2=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPGreaterThan(),children2);
  assertNotNull(node2);
  List bothFilters=Lists.newArrayList();
  bothFilters.add(node);
  bothFilters.add(node2);
  ExprNodeGenericFuncDesc both=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPAnd(),bothFilters);
  String filterExpr=SerializationUtilities.serializeExpression(both);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR,filterExpr);
  List<IteratorSetting> iterators=handler.getIterators(conf,columnMapper);
  // Expected value first (JUnit convention).
  assertEquals(2,iterators.size());
  assertFilterOptions(iterators.get(0),"cf:f1",new String(Base64.encodeBase64("aaa".getBytes())),LessThanOrEqual.class,StringCompare.class);
  byte[] intVal=new byte[4];
  ByteBuffer.wrap(intVal).putInt(5);
  assertFilterOptions(iterators.get(1),"cf:f2",new String(Base64.encodeBase64(intVal)),GreaterThan.class,IntCompare.class);
}

/**
 * Asserts that the given IteratorSetting carries the four
 * PrimitiveComparisonFilter options with the expected values.
 */
private static void assertFilterOptions(IteratorSetting setting,String expectedColumn,String expectedBase64Const,Class expectedCompareOpt,Class expectedPCompare){
  Map<String,String> options=setting.getOptions();
  assertEquals(expectedColumn,options.get(PrimitiveComparisonFilter.COLUMN));
  assertEquals(expectedBase64Const,options.get(PrimitiveComparisonFilter.CONST_VAL));
  assertEquals(expectedCompareOpt.getName(),options.get(PrimitiveComparisonFilter.COMPARE_OPT_CLASS));
  assertEquals(expectedPCompare.getName(),options.get(PrimitiveComparisonFilter.P_COMPARE_CLASS));
}
UtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * When ITERATOR_PUSHDOWN_KEY is disabled, no filtering iterators may be
 * generated even though the filter expression contains column predicates.
 */
@Test public void testIgnoreIteratorPushdown() throws TooManyAccumuloColumnsException {
  conf=new JobConf();
  List columnNames=Arrays.asList("field1","field2","rid");
  List columnTypes=Arrays.asList(TypeInfoFactory.stringTypeInfo,TypeInfoFactory.intTypeInfo,TypeInfoFactory.stringTypeInfo);
  conf.set(serdeConstants.LIST_COLUMNS,Joiner.on(',').join(columnNames));
  conf.set(serdeConstants.LIST_COLUMN_TYPES,"string,int,string");
  String columnMappingStr="cf:f1,cf:f2,:rowID";
  conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS,columnMappingStr);
  columnMapper=new ColumnMapper(columnMappingStr,ColumnEncoding.STRING.getName(),columnNames,columnTypes);
  // field1 <= 'aaa'
  ExprNodeDesc column=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"field1",null,false);
  ExprNodeDesc constant=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"aaa");
  List children=Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeDesc node=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqualOrLessThan(),children);
  assertNotNull(node);
  // field2 > 5
  ExprNodeDesc column2=new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo,"field2",null,false);
  ExprNodeDesc constant2=new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo,5);
  List children2=Lists.newArrayList();
  children2.add(column2);
  children2.add(constant2);
  ExprNodeDesc node2=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPGreaterThan(),children2);
  assertNotNull(node2);
  List bothFilters=Lists.newArrayList();
  bothFilters.add(node);
  bothFilters.add(node2);
  ExprNodeGenericFuncDesc both=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPAnd(),bothFilters);
  String filterExpr=SerializationUtilities.serializeExpression(both);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR,filterExpr);
  // Explicitly disable iterator pushdown.
  conf.setBoolean(AccumuloSerDeParameters.ITERATOR_PUSHDOWN_KEY,false);
  try {
    List iterators=handler.getIterators(conf,columnMapper);
    // Expected value first (JUnit convention).
    assertEquals(0,iterators.size());
  }
  catch ( Exception e) {
    fail(StringUtils.stringifyException(e));
  }
}
InternalCallVerifier EqualityVerifier NullVerifier ExceptionVerifier HybridVerifier
/**
 * float has no registered primitive comparison, so resolving one for a float
 * search condition must throw NoSuchPrimitiveComparisonException.
 */
@Test(expected=NoSuchPrimitiveComparisonException.class) public void testPushdownColumnTypeNotSupported() throws SerDeException, NoSuchPrimitiveComparisonException, NoSuchCompareOpException {
  ExprNodeDesc column=new ExprNodeColumnDesc(TypeInfoFactory.floatTypeInfo,"field1",null,false);
  ExprNodeDesc constant=new ExprNodeConstantDesc(TypeInfoFactory.floatTypeInfo,5.5f);
  List children=Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeGenericFuncDesc node=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqual(),children);
  assertNotNull(node);
  String filterExpr=SerializationUtilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR,filterExpr);
  List<IndexSearchCondition> sConditions=handler.getSearchConditions(conf);
  // Expected value first (JUnit convention).
  assertEquals(1,sConditions.size());
  IndexSearchCondition sc=sConditions.get(0);
  // Expected to throw: no comparison registered for float.
  handler.getPrimitiveComparison(sc.getColumnDesc().getTypeString(),sc);
}
InternalCallVerifier EqualityVerifier
/**
 * A single Range (not wrapped in a collection) from the range generator
 * should come back from getRanges as a singleton list.
 */
@Test public void testSingleRangeGeneratorOutput() throws SerDeException {
  AccumuloPredicateHandler stubbedHandler=Mockito.mock(AccumuloPredicateHandler.class);
  ExprNodeDesc filterRoot=Mockito.mock(ExprNodeDesc.class);
  String rowIdColumn="rid";
  Range generated=new Range("a");
  Mockito.when(stubbedHandler.getExpression(conf)).thenReturn(filterRoot);
  Mockito.when(stubbedHandler.generateRanges(columnMapper,rowIdColumn,filterRoot)).thenReturn(generated);
  Mockito.when(stubbedHandler.getRanges(conf,columnMapper)).thenCallRealMethod();
  Assert.assertEquals(Collections.singletonList(generated),stubbedHandler.getRanges(conf,columnMapper));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * rid &gt; 'aaa' should produce the range ('aaa', +inf): the constant itself
 * is excluded and everything above it is contained.
 */
@Test public void testRangeGreaterThan() throws SerDeException {
  ExprNodeDesc column=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
  ExprNodeDesc constant=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"aaa");
  List children=Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeGenericFuncDesc node=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPGreaterThan(),children);
  assertNotNull(node);
  String filterExpr=SerializationUtilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR,filterExpr);
  Collection<Range> ranges=handler.getRanges(conf,columnMapper);
  // Expected value first (JUnit convention).
  assertEquals(1,ranges.size());
  Range range=ranges.iterator().next();
  assertTrue(range.isStartKeyInclusive());
  assertFalse(range.isEndKeyInclusive());
  // 'aaa' is excluded by the strict greater-than.
  assertFalse(range.contains(new Key(new Text("aaa"))));
  assertFalse(range.afterEndKey(new Key(new Text("ccccc"))));
  assertTrue(range.contains(new Key(new Text("aab"))));
  assertTrue(range.beforeStartKey(new Key(new Text("aa"))));
  assertTrue(range.beforeStartKey(new Key(new Text("aaa"))));
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Predicates solely on the rowID column are handled via Ranges, so no
 * filtering iterators should be generated for them.
 */
@Test public void testIteratorIgnoreRowIDFields(){
  // rid <= 'aaa'
  ExprNodeDesc column=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
  ExprNodeDesc constant=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"aaa");
  List children=Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeDesc node=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqualOrLessThan(),children);
  assertNotNull(node);
  // rid > 'bbb'
  ExprNodeDesc column2=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
  ExprNodeDesc constant2=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"bbb");
  List children2=Lists.newArrayList();
  children2.add(column2);
  children2.add(constant2);
  ExprNodeDesc node2=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPGreaterThan(),children2);
  assertNotNull(node2);
  List bothFilters=Lists.newArrayList();
  bothFilters.add(node);
  bothFilters.add(node2);
  ExprNodeGenericFuncDesc both=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPAnd(),bothFilters);
  String filterExpr=SerializationUtilities.serializeExpression(both);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR,filterExpr);
  try {
    List iterators=handler.getIterators(conf,columnMapper);
    // Expected value first (JUnit convention).
    assertEquals(0,iterators.size());
  }
  catch ( SerDeException e) {
    // Fail loudly instead of silently discarding the exception text.
    fail(StringUtils.stringifyException(e));
  }
}
InternalCallVerifier EqualityVerifier
/**
 * Multiple Ranges from the range generator should be handed back from
 * getRanges unchanged and in order.
 */
@Test public void testManyRangesGeneratorOutput() throws SerDeException {
  AccumuloPredicateHandler stubbedHandler=Mockito.mock(AccumuloPredicateHandler.class);
  ExprNodeDesc filterRoot=Mockito.mock(ExprNodeDesc.class);
  String rowIdColumn="rid";
  Range first=new Range("a");
  Range second=new Range("z");
  Mockito.when(stubbedHandler.getExpression(conf)).thenReturn(filterRoot);
  Mockito.when(stubbedHandler.generateRanges(columnMapper,rowIdColumn,filterRoot)).thenReturn(Arrays.asList(first,second));
  Mockito.when(stubbedHandler.getRanges(conf,columnMapper)).thenCallRealMethod();
  Assert.assertEquals(Arrays.asList(first,second),stubbedHandler.getRanges(conf,columnMapper));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * rid &gt;= 'aaa' should produce the range ['aaa', +inf): unlike strict
 * greater-than, the constant itself remains contained.
 */
@Test public void rangeGreaterThanOrEqual() throws SerDeException {
  ExprNodeDesc column=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
  ExprNodeDesc constant=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"aaa");
  List children=Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeGenericFuncDesc node=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqualOrGreaterThan(),children);
  assertNotNull(node);
  String filterExpr=SerializationUtilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR,filterExpr);
  Collection<Range> ranges=handler.getRanges(conf,columnMapper);
  // Expected value first (JUnit convention).
  assertEquals(1,ranges.size());
  Range range=ranges.iterator().next();
  assertTrue(range.isStartKeyInclusive());
  assertFalse(range.isEndKeyInclusive());
  assertTrue(range.contains(new Key(new Text("aaa"))));
  assertFalse(range.afterEndKey(new Key(new Text("ccccc"))));
  assertTrue(range.contains(new Key(new Text("aab"))));
  assertTrue(range.beforeStartKey(new Key(new Text("aa"))));
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * rid &gt;= 'f' AND rid &lt;= 'm' should intersect into the single range
 * ['f', 'm\0') — the trailing NUL makes the inclusive upper bound exclusive.
 */
@Test public void testRowRangeIntersection() throws SerDeException {
  // rid >= 'f'
  ExprNodeDesc ridColumn=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
  ExprNodeDesc lowerBound=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"f");
  List geChildren=Lists.newArrayList();
  geChildren.add(ridColumn);
  geChildren.add(lowerBound);
  ExprNodeDesc geNode=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqualOrGreaterThan(),geChildren);
  assertNotNull(geNode);
  // rid <= 'm'
  ExprNodeDesc ridColumnAgain=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
  ExprNodeDesc upperBound=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"m");
  List leChildren=Lists.newArrayList();
  leChildren.add(ridColumnAgain);
  leChildren.add(upperBound);
  ExprNodeDesc leNode=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqualOrLessThan(),leChildren);
  assertNotNull(leNode);
  List conjuncts=Lists.newArrayList();
  conjuncts.add(geNode);
  conjuncts.add(leNode);
  ExprNodeGenericFuncDesc andNode=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPAnd(),conjuncts);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR,SerializationUtilities.serializeExpression(andNode));
  List<Range> ranges=handler.getRanges(conf,columnMapper);
  assertEquals(1,ranges.size());
  assertEquals(new Range(new Key("f"),true,new Key("m\0"),false),ranges.get(0));
}
Class: org.apache.hadoop.hive.accumulo.predicate.TestAccumuloRangeGenerator APIUtilityVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * rid &gt;= 'f' AND rid &lt;= 'm' walked through AccumuloRangeGenerator
 * should fold into the single range ['f', 'm\0').
 */
@Test public void testRangeConjunction() throws Exception {
  // rid >= 'f'
  ExprNodeDesc ridColumn=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
  ExprNodeDesc lowerBound=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"f");
  List geChildren=Lists.newArrayList();
  geChildren.add(ridColumn);
  geChildren.add(lowerBound);
  ExprNodeDesc geNode=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqualOrGreaterThan(),geChildren);
  assertNotNull(geNode);
  // rid <= 'm'
  ExprNodeDesc ridColumnAgain=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
  ExprNodeDesc upperBound=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"m");
  List leChildren=Lists.newArrayList();
  leChildren.add(ridColumnAgain);
  leChildren.add(upperBound);
  ExprNodeDesc leNode=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqualOrLessThan(),leChildren);
  assertNotNull(leNode);
  List conjuncts=Lists.newArrayList();
  conjuncts.add(geNode);
  conjuncts.add(leNode);
  ExprNodeGenericFuncDesc andNode=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPAnd(),conjuncts);
  List expectedRanges=Arrays.asList(new Range(new Key("f"),true,new Key("m\0"),false));
  // Drive the generator through the default graph-walking machinery.
  AccumuloRangeGenerator generator=new AccumuloRangeGenerator(handler,rowIdMapping,"rid");
  Dispatcher dispatcher=new DefaultRuleDispatcher(generator,Collections.emptyMap(),null);
  GraphWalker walker=new DefaultGraphWalker(dispatcher);
  ArrayList roots=new ArrayList();
  roots.add(andNode);
  HashMap walkResults=new HashMap();
  try {
    walker.startWalking(roots,walkResults);
  }
  catch ( SemanticException ex) {
    throw new RuntimeException(ex);
  }
  Object walkResult=walkResults.get(andNode);
  Assert.assertNotNull(walkResult);
  Assert.assertTrue("Result from graph walk was not a List",walkResult instanceof List);
  @SuppressWarnings("unchecked") List actualRanges=(List)walkResult;
  Assert.assertEquals(expectedRanges,actualRanges);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Date-typed constants (rid &gt;= 2014-01-01 AND rid &lt; 2014-07-01) should
 * fold into a single range over the ISO string forms of the dates.
 */
@Test public void testDateRangeConjunction() throws Exception {
  // rid >= 2014-01-01
  ExprNodeDesc ridColumn=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
  ExprNodeDesc lowerBound=new ExprNodeConstantDesc(TypeInfoFactory.dateTypeInfo,Date.valueOf("2014-01-01"));
  List geChildren=Lists.newArrayList();
  geChildren.add(ridColumn);
  geChildren.add(lowerBound);
  ExprNodeDesc geNode=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqualOrGreaterThan(),geChildren);
  assertNotNull(geNode);
  // rid < 2014-07-01
  ExprNodeDesc ridColumnAgain=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
  ExprNodeDesc upperBound=new ExprNodeConstantDesc(TypeInfoFactory.dateTypeInfo,Date.valueOf("2014-07-01"));
  List ltChildren=Lists.newArrayList();
  ltChildren.add(ridColumnAgain);
  ltChildren.add(upperBound);
  ExprNodeDesc ltNode=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPLessThan(),ltChildren);
  assertNotNull(ltNode);
  List conjuncts=Lists.newArrayList();
  conjuncts.add(geNode);
  conjuncts.add(ltNode);
  ExprNodeGenericFuncDesc andNode=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPAnd(),conjuncts);
  List expectedRanges=Arrays.asList(new Range(new Key("2014-01-01"),true,new Key("2014-07-01"),false));
  // Drive the generator through the default graph-walking machinery.
  AccumuloRangeGenerator generator=new AccumuloRangeGenerator(handler,rowIdMapping,"rid");
  Dispatcher dispatcher=new DefaultRuleDispatcher(generator,Collections.emptyMap(),null);
  GraphWalker walker=new DefaultGraphWalker(dispatcher);
  ArrayList roots=new ArrayList();
  roots.add(andNode);
  HashMap walkResults=new HashMap();
  try {
    walker.startWalking(roots,walkResults);
  }
  catch ( SemanticException ex) {
    throw new RuntimeException(ex);
  }
  Object walkResult=walkResults.get(andNode);
  Assert.assertNotNull(walkResult);
  Assert.assertTrue("Result from graph walk was not a List",walkResult instanceof List);
  @SuppressWarnings("unchecked") List actualRanges=(List)walkResult;
  Assert.assertEquals(expectedRanges,actualRanges);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Only the rowID predicate contributes to range generation: rid &gt;= 'f'
 * AND anythingElse &lt;= 'foo' should yield just the range ['f', +inf).
 */
@Test public void testPartialRangeConjunction() throws Exception {
  // rid >= 'f' — the only predicate on the rowID column.
  ExprNodeDesc ridColumn=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
  ExprNodeDesc lowerBound=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"f");
  List geChildren=Lists.newArrayList();
  geChildren.add(ridColumn);
  geChildren.add(lowerBound);
  ExprNodeDesc geNode=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqualOrGreaterThan(),geChildren);
  assertNotNull(geNode);
  // anythingElse <= 'foo' — unrelated to the rowID, must be ignored here.
  ExprNodeDesc otherColumn=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"anythingElse",null,false);
  ExprNodeDesc otherConstant=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"foo");
  List leChildren=Lists.newArrayList();
  leChildren.add(otherColumn);
  leChildren.add(otherConstant);
  ExprNodeDesc leNode=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqualOrLessThan(),leChildren);
  assertNotNull(leNode);
  List conjuncts=Lists.newArrayList();
  conjuncts.add(geNode);
  conjuncts.add(leNode);
  ExprNodeGenericFuncDesc andNode=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPAnd(),conjuncts);
  List expectedRanges=Arrays.asList(new Range(new Key("f"),true,null,false));
  // Drive the generator through the default graph-walking machinery.
  AccumuloRangeGenerator generator=new AccumuloRangeGenerator(handler,rowIdMapping,"rid");
  Dispatcher dispatcher=new DefaultRuleDispatcher(generator,Collections.emptyMap(),null);
  GraphWalker walker=new DefaultGraphWalker(dispatcher);
  ArrayList roots=new ArrayList();
  roots.add(andNode);
  HashMap walkResults=new HashMap();
  try {
    walker.startWalking(roots,walkResults);
  }
  catch ( SemanticException ex) {
    throw new RuntimeException(ex);
  }
  Object walkResult=walkResults.get(andNode);
  Assert.assertNotNull(walkResult);
  Assert.assertTrue("Result from graph walk was not a List",walkResult instanceof List);
  @SuppressWarnings("unchecked") List actualRanges=(List)walkResult;
  Assert.assertEquals(expectedRanges,actualRanges);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * AND of rid >= "h" with (rid <= "d" OR rid >= "q"): the intersection of
 * [h, +inf) with (-inf, d] is empty and with [q, +inf) is [q, +inf), so the
 * generator must emit the single range [q, +infinity).
 */
@Test public void testRangeConjunctionWithDisjunction() throws Exception {
// rid >= "h"
ExprNodeDesc column=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
ExprNodeDesc constant=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"h");
List children=Lists.newArrayList();
children.add(column);
children.add(constant);
ExprNodeDesc node=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqualOrGreaterThan(),children);
assertNotNull(node);
// rid <= "d"
ExprNodeDesc column2=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
ExprNodeDesc constant2=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"d");
List children2=Lists.newArrayList();
children2.add(column2);
children2.add(constant2);
ExprNodeDesc node2=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqualOrLessThan(),children2);
assertNotNull(node2);
// rid >= "q"
ExprNodeDesc column3=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
ExprNodeDesc constant3=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"q");
List children3=Lists.newArrayList();
children3.add(column3);
children3.add(constant3);
ExprNodeDesc node3=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqualOrGreaterThan(),children3);
assertNotNull(node3);
// (rid <= "d" OR rid >= "q")
List orFilters=Lists.newArrayList();
orFilters.add(node2);
orFilters.add(node3);
ExprNodeGenericFuncDesc orNode=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPOr(),orFilters);
// rid >= "h" AND (rid <= "d" OR rid >= "q")
List andFilters=Lists.newArrayList();
andFilters.add(node);
andFilters.add(orNode);
ExprNodeGenericFuncDesc both=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPAnd(),andFilters);
// Expected: one range starting (inclusive) at "q" with no upper bound.
List expectedRanges=Arrays.asList(new Range(new Key("q"),true,null,false));
// handler and rowIdMapping are fixtures declared elsewhere in this class.
AccumuloRangeGenerator rangeGenerator=new AccumuloRangeGenerator(handler,rowIdMapping,"rid");
Dispatcher disp=new DefaultRuleDispatcher(rangeGenerator,Collections.emptyMap(),null);
GraphWalker ogw=new DefaultGraphWalker(disp);
ArrayList topNodes=new ArrayList();
topNodes.add(both);
HashMap nodeOutput=new HashMap();
try {
ogw.startWalking(topNodes,nodeOutput);
}
catch ( SemanticException ex) {
throw new RuntimeException(ex);
}
// The walker records each node's result; the root's must be the range list.
Object result=nodeOutput.get(both);
Assert.assertNotNull(result);
Assert.assertTrue("Result from graph walk was not a List",result instanceof List);
@SuppressWarnings("unchecked") List actualRanges=(List)result;
Assert.assertEquals(expectedRanges,actualRanges);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * OR of rid >= "f" with rid <= "m": the union covers the whole key space, so
 * the generator must collapse it to a single unbounded Range (full-table
 * scan).
 */
@Test public void testRangeDisjunction() throws Exception {
// rid >= "f"
ExprNodeDesc column=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
ExprNodeDesc constant=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"f");
List children=Lists.newArrayList();
children.add(column);
children.add(constant);
ExprNodeDesc node=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqualOrGreaterThan(),children);
assertNotNull(node);
// rid <= "m"
ExprNodeDesc column2=new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,"rid",null,false);
ExprNodeDesc constant2=new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo,"m");
List children2=Lists.newArrayList();
children2.add(column2);
children2.add(constant2);
ExprNodeDesc node2=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPEqualOrLessThan(),children2);
assertNotNull(node2);
// OR the two predicates together into the root expression.
List bothFilters=Lists.newArrayList();
bothFilters.add(node);
bothFilters.add(node2);
ExprNodeGenericFuncDesc both=new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,new GenericUDFOPOr(),bothFilters);
// new Range() with no endpoints is the unbounded (-inf, +inf) range.
List expectedRanges=Arrays.asList(new Range());
// handler and rowIdMapping are fixtures declared elsewhere in this class.
AccumuloRangeGenerator rangeGenerator=new AccumuloRangeGenerator(handler,rowIdMapping,"rid");
Dispatcher disp=new DefaultRuleDispatcher(rangeGenerator,Collections.emptyMap(),null);
GraphWalker ogw=new DefaultGraphWalker(disp);
ArrayList topNodes=new ArrayList();
topNodes.add(both);
HashMap nodeOutput=new HashMap();
try {
ogw.startWalking(topNodes,nodeOutput);
}
catch ( SemanticException ex) {
throw new RuntimeException(ex);
}
// The walker records each node's result; the root's must be the range list.
Object result=nodeOutput.get(both);
Assert.assertNotNull(result);
Assert.assertTrue("Result from graph walk was not a List",result instanceof List);
@SuppressWarnings("unchecked") List actualRanges=(List)result;
Assert.assertEquals(expectedRanges,actualRanges);
}
Class: org.apache.hadoop.hive.accumulo.predicate.TestPrimitiveComparisonFilter APIUtilityVerifier IterativeVerifier EqualityVerifier
/**
 * Round-trips 500 string constants through Base64: each value is stored in
 * the filter options under CONST_VAL in encoded form and must decode back
 * unchanged via getConstant().
 */
@Test public void testBase64ConstantEncode(){
PrimitiveComparisonFilter primitiveFilter=new PrimitiveComparisonFilter();
Map filterOptions=new HashMap();
int value=0;
while (value < 500) {
String expected=Integer.toString(value);
String encoded=new String(Base64.encodeBase64(expected.getBytes()));
filterOptions.put(PrimitiveComparisonFilter.CONST_VAL,encoded);
Assert.assertEquals(expected,new String(primitiveFilter.getConstant(filterOptions)));
value++;
}
}
IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Round-trips 500 serialized IntWritable values through Base64 via the
 * filter options: each value is written to a byte stream, Base64-encoded
 * into CONST_VAL, decoded back by getConstant(), and re-read as an
 * IntWritable that must equal the original int.
 */
@Test public void testNumericBase64ConstantEncode() throws IOException {
PrimitiveComparisonFilter filter=new PrimitiveComparisonFilter();
Map options=new HashMap();
IntWritable writable=new IntWritable();
ByteArrayOutputStream baos=new ByteArrayOutputStream();
DataOutputStream out=new DataOutputStream(baos);
for (int i=0; i < 500; i++) {
writable.set(i);
writable.write(out);
options.put(PrimitiveComparisonFilter.CONST_VAL,new String(Base64.encodeBase64(baos.toByteArray())));
byte[] bytes=filter.getConstant(options);
ByteArrayInputStream bais=new ByteArrayInputStream(bytes);
DataInputStream in=new DataInputStream(bais);
writable.readFields(in);
Assert.assertEquals(i,writable.get());
// Clear the buffer so the next iteration encodes only one serialized int.
baos.reset();
}
}
Class: org.apache.hadoop.hive.accumulo.serde.TestAccumuloRowSerializer APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Serializes a lazy struct (string row id, two binary-encoded ints, one
 * string) into an Accumulo Mutation: the "#b" columns must contain the raw
 * DataOutput int encoding, the plain column the string bytes.
 */
@Test public void testBinarySerialization() throws IOException, SerDeException {
// Generic type arguments were stripped from this file; restored here so the
// for-each over TypeInfo and the ColumnUpdate element accesses compile.
List<String> columns=Arrays.asList("row","cq1","cq2","cq3");
List<TypeInfo> types=Arrays.asList(TypeInfoFactory.stringTypeInfo,TypeInfoFactory.intTypeInfo,TypeInfoFactory.intTypeInfo,TypeInfoFactory.stringTypeInfo);
List<String> typeNames=new ArrayList<String>(types.size());
for ( TypeInfo type : types) {
typeNames.add(type.getTypeName());
}
Properties tableProperties=new Properties();
// "#b" marks cq1/cq2 as binary-encoded; cq3 stays string-encoded.
tableProperties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,":rowid,cf:cq1#b,cf:cq2#b,cf:cq3");
tableProperties.setProperty(serdeConstants.FIELD_DELIM," ");
tableProperties.setProperty(serdeConstants.LIST_COLUMNS,Joiner.on(',').join(columns));
tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES,Joiner.on(',').join(typeNames));
AccumuloSerDeParameters accumuloSerDeParams=new AccumuloSerDeParameters(new Configuration(),tableProperties,AccumuloSerDe.class.getSimpleName());
LazySerDeParameters serDeParams=accumuloSerDeParams.getSerDeParameters();
LazySimpleStructObjectInspector oi=(LazySimpleStructObjectInspector)LazyFactory.createLazyStructInspector(columns,types,serDeParams.getSeparators(),serDeParams.getNullSequence(),serDeParams.isLastColumnTakesRest(),serDeParams.isEscaped(),serDeParams.getEscapeChar());
AccumuloRowSerializer serializer=new AccumuloRowSerializer(0,serDeParams,accumuloSerDeParams.getColumnMappings(),new ColumnVisibility(),accumuloSerDeParams.getRowIdFactory());
// Lazily-parsed input record: "row1 10 20 value" (space-delimited).
LazyStruct obj=(LazyStruct)LazyFactory.createLazyObject(oi);
ByteArrayRef byteRef=new ByteArrayRef();
byteRef.setData(new byte[]{'r','o','w','1',' ','1','0',' ','2','0',' ','v','a','l','u','e'});
obj.init(byteRef,0,byteRef.getData().length);
Mutation m=(Mutation)serializer.serialize(obj,oi);
Assert.assertArrayEquals("row1".getBytes(),m.getRow());
List<ColumnUpdate> updates=m.getUpdates();
Assert.assertEquals(3,updates.size());
// Reference stream used to produce the expected binary int encodings.
ByteArrayOutputStream baos=new ByteArrayOutputStream();
DataOutputStream out=new DataOutputStream(baos);
ColumnUpdate update=updates.get(0);
Assert.assertEquals("cf",new String(update.getColumnFamily()));
Assert.assertEquals("cq1",new String(update.getColumnQualifier()));
out.writeInt(10);
Assert.assertArrayEquals(baos.toByteArray(),update.getValue());
update=updates.get(1);
Assert.assertEquals("cf",new String(update.getColumnFamily()));
Assert.assertEquals("cq2",new String(update.getColumnQualifier()));
// Reset so the buffer contains only the encoding of 20.
baos.reset();
out.writeInt(20);
Assert.assertArrayEquals(baos.toByteArray(),update.getValue());
update=updates.get(2);
Assert.assertEquals("cf",new String(update.getColumnFamily()));
Assert.assertEquals("cq3",new String(update.getColumnQualifier()));
Assert.assertEquals("value",new String(update.getValue()));
}
InternalCallVerifier EqualityVerifier
/**
 * getSerializedValue must clear the shared ByteStream.Output before writing
 * a new value: the buffer is pre-loaded with "foobar" and must report size 0
 * after serializing a STRING-encoded field. Uses a Mockito partial mock so
 * only getSerializedValue runs its real implementation while all
 * collaborators are stubbed.
 * NOTE(review): the final size-0 assertion implies the real implementation
 * resets the buffer and leaves it empty for STRING encoding — confirm
 * against AccumuloRowSerializer.getSerializedValue.
 */
@Test public void testBufferResetBeforeUse() throws IOException {
ByteStream.Output output=new ByteStream.Output();
PrimitiveObjectInspector fieldObjectInspector=Mockito.mock(StringObjectInspector.class);
ColumnMapping mapping=Mockito.mock(ColumnMapping.class);
// Pollute the buffer so a missing reset would be detected below.
output.write("foobar".getBytes());
AccumuloRowSerializer serializer=Mockito.mock(AccumuloRowSerializer.class);
String object="hello";
// Only the method under test delegates to the real implementation.
Mockito.when(serializer.getSerializedValue(Mockito.any(ObjectInspector.class),Mockito.any(),Mockito.any(ByteStream.Output.class),Mockito.any(ColumnMapping.class))).thenCallRealMethod();
Mockito.when(fieldObjectInspector.getCategory()).thenReturn(ObjectInspector.Category.PRIMITIVE);
Mockito.when(fieldObjectInspector.getPrimitiveCategory()).thenReturn(PrimitiveCategory.STRING);
Mockito.when(fieldObjectInspector.getPrimitiveWritableObject(Mockito.any(Object.class))).thenReturn(new Text(object));
Mockito.when(mapping.getEncoding()).thenReturn(ColumnEncoding.STRING);
serializer.getSerializedValue(fieldObjectInspector,object,output,mapping);
Assert.assertEquals(0,output.size());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Same round trip as testBinarySerialization, but with a "foo" visibility
 * label: every ColumnUpdate must carry the label in addition to the expected
 * family, qualifier, and value.
 */
@Test public void testVisibilityLabel() throws IOException, SerDeException {
// Generic type arguments were stripped from this file; restored here so the
// for-each over TypeInfo and the ColumnUpdate element accesses compile.
List<String> columns=Arrays.asList("row","cq1","cq2","cq3");
List<TypeInfo> types=Arrays.asList(TypeInfoFactory.stringTypeInfo,TypeInfoFactory.intTypeInfo,TypeInfoFactory.intTypeInfo,TypeInfoFactory.stringTypeInfo);
List<String> typeNames=new ArrayList<String>(types.size());
for ( TypeInfo type : types) {
typeNames.add(type.getTypeName());
}
Properties tableProperties=new Properties();
tableProperties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,":rowid,cf:cq1#b,cf:cq2#b,cf:cq3");
tableProperties.setProperty(serdeConstants.FIELD_DELIM," ");
tableProperties.setProperty(serdeConstants.LIST_COLUMNS,Joiner.on(',').join(columns));
tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES,Joiner.on(',').join(typeNames));
AccumuloSerDeParameters accumuloSerDeParams=new AccumuloSerDeParameters(new Configuration(),tableProperties,AccumuloSerDe.class.getSimpleName());
LazySerDeParameters serDeParams=accumuloSerDeParams.getSerDeParameters();
LazySimpleStructObjectInspector oi=(LazySimpleStructObjectInspector)LazyFactory.createLazyStructInspector(columns,types,serDeParams.getSeparators(),serDeParams.getNullSequence(),serDeParams.isLastColumnTakesRest(),serDeParams.isEscaped(),serDeParams.getEscapeChar());
// All cells written by this serializer carry the "foo" visibility.
AccumuloRowSerializer serializer=new AccumuloRowSerializer(0,serDeParams,accumuloSerDeParams.getColumnMappings(),new ColumnVisibility("foo"),accumuloSerDeParams.getRowIdFactory());
LazyStruct obj=(LazyStruct)LazyFactory.createLazyObject(oi);
ByteArrayRef byteRef=new ByteArrayRef();
// Input record "row1 10 20 value" (space-delimited).
byteRef.setData(new byte[]{'r','o','w','1',' ','1','0',' ','2','0',' ','v','a','l','u','e'});
obj.init(byteRef,0,byteRef.getData().length);
Mutation m=(Mutation)serializer.serialize(obj,oi);
Assert.assertArrayEquals("row1".getBytes(),m.getRow());
List<ColumnUpdate> updates=m.getUpdates();
Assert.assertEquals(3,updates.size());
// Reference stream for the expected binary int encodings of the #b columns.
ByteArrayOutputStream baos=new ByteArrayOutputStream();
DataOutputStream out=new DataOutputStream(baos);
ColumnUpdate update=updates.get(0);
Assert.assertEquals("cf",new String(update.getColumnFamily()));
Assert.assertEquals("cq1",new String(update.getColumnQualifier()));
Assert.assertEquals("foo",new String(update.getColumnVisibility()));
out.writeInt(10);
Assert.assertArrayEquals(baos.toByteArray(),update.getValue());
update=updates.get(1);
Assert.assertEquals("cf",new String(update.getColumnFamily()));
Assert.assertEquals("cq2",new String(update.getColumnQualifier()));
Assert.assertEquals("foo",new String(update.getColumnVisibility()));
// Reset so the buffer contains only the encoding of 20.
baos.reset();
out.writeInt(20);
Assert.assertArrayEquals(baos.toByteArray(),update.getValue());
update=updates.get(2);
Assert.assertEquals("cf",new String(update.getColumnFamily()));
Assert.assertEquals("cq3",new String(update.getColumnQualifier()));
Assert.assertEquals("foo",new String(update.getColumnVisibility()));
Assert.assertEquals("value",new String(update.getValue()));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Serializes a row whose single non-rowid column is a map mapped to the
 * wildcard family "cf:*": each map key becomes the column qualifier and each
 * map value the cell value.
 */
@Test public void testMapSerialization() throws IOException, SerDeException {
// Generic type arguments were stripped from this file; restored here so the
// for-each over TypeInfo and the ColumnUpdate element accesses compile.
List<String> columns=Arrays.asList("row","col");
List<TypeInfo> types=Arrays.asList(TypeInfoFactory.stringTypeInfo,TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.stringTypeInfo,TypeInfoFactory.stringTypeInfo));
List<String> typeNames=new ArrayList<String>(types.size());
for ( TypeInfo type : types) {
typeNames.add(type.getTypeName());
}
Properties tableProperties=new Properties();
// "cf:*" maps the whole map column into family cf, one qualifier per key.
tableProperties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,":rowid,cf:*");
tableProperties.setProperty(serdeConstants.FIELD_DELIM," ");
tableProperties.setProperty(serdeConstants.COLLECTION_DELIM,",");
tableProperties.setProperty(serdeConstants.MAPKEY_DELIM,":");
tableProperties.setProperty(serdeConstants.LIST_COLUMNS,Joiner.on(',').join(columns));
tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES,Joiner.on(',').join(typeNames));
AccumuloSerDeParameters accumuloSerDeParams=new AccumuloSerDeParameters(new Configuration(),tableProperties,AccumuloSerDe.class.getSimpleName());
LazySerDeParameters serDeParams=accumuloSerDeParams.getSerDeParameters();
TypeInfo stringTypeInfo=TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME);
LazyStringObjectInspector stringOI=(LazyStringObjectInspector)LazyFactory.createLazyObjectInspector(stringTypeInfo,new byte[]{0},0,serDeParams.getNullSequence(),serDeParams.isEscaped(),serDeParams.getEscapeChar());
LazyMapObjectInspector mapOI=LazyObjectInspectorFactory.getLazySimpleMapObjectInspector(stringOI,stringOI,(byte)',',(byte)':',serDeParams.getNullSequence(),serDeParams.isEscaped(),serDeParams.getEscapeChar());
LazySimpleStructObjectInspector structOI=(LazySimpleStructObjectInspector)LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(columns,Arrays.asList(stringOI,mapOI),(byte)' ',serDeParams.getNullSequence(),serDeParams.isLastColumnTakesRest(),serDeParams.isEscaped(),serDeParams.getEscapeChar());
AccumuloRowSerializer serializer=new AccumuloRowSerializer(0,serDeParams,accumuloSerDeParams.getColumnMappings(),new ColumnVisibility(),accumuloSerDeParams.getRowIdFactory());
LazyStruct obj=(LazyStruct)LazyFactory.createLazyObject(structOI);
ByteArrayRef byteRef=new ByteArrayRef();
// Input record: row id "row1", then map entries cq1:10, cq2:20, cq3:value.
byteRef.setData("row1 cq1:10,cq2:20,cq3:value".getBytes());
obj.init(byteRef,0,byteRef.getData().length);
Mutation m=(Mutation)serializer.serialize(obj,structOI);
Assert.assertArrayEquals("row1".getBytes(),m.getRow());
List<ColumnUpdate> updates=m.getUpdates();
Assert.assertEquals(3,updates.size());
ColumnUpdate update=updates.get(0);
Assert.assertEquals("cf",new String(update.getColumnFamily()));
Assert.assertEquals("cq1",new String(update.getColumnQualifier()));
Assert.assertEquals("10",new String(update.getValue()));
update=updates.get(1);
Assert.assertEquals("cf",new String(update.getColumnFamily()));
Assert.assertEquals("cq2",new String(update.getColumnQualifier()));
Assert.assertEquals("20",new String(update.getValue()));
update=updates.get(2);
Assert.assertEquals("cf",new String(update.getColumnFamily()));
Assert.assertEquals("cq3",new String(update.getColumnQualifier()));
Assert.assertEquals("value",new String(update.getValue()));
}
Class: org.apache.hadoop.hive.accumulo.serde.TestAccumuloSerDe IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Deserializes a composite row id ("p1_p2_p3") into a three-field struct via
 * DelimitedAccumuloRowIdFactory with "_" as the delimiter, plus one plain
 * string column.
 */
@Test public void testCompositeKeyDeserialization() throws Exception {
Properties properties=new Properties();
Configuration conf=new Configuration();
properties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,":rowID,cf:f1");
properties.setProperty(serdeConstants.LIST_COLUMNS,"row,field1");
// The generic arguments were lost in this file ("struct,string"); the test
// below asserts exactly three LazyString struct members, so restore a
// struct of three string fields (field names are not asserted — TODO
// confirm against the original source).
properties.setProperty(serdeConstants.LIST_COLUMN_TYPES,"struct<col1:string,col2:string,col3:string>,string");
properties.setProperty(DelimitedAccumuloRowIdFactory.ACCUMULO_COMPOSITE_DELIMITER,"_");
properties.setProperty(AccumuloSerDeParameters.COMPOSITE_ROWID_FACTORY,DelimitedAccumuloRowIdFactory.class.getName());
serde.initialize(conf,properties);
AccumuloHiveRow row=new AccumuloHiveRow();
row.setRowId("p1_p2_p3");
row.add("cf","f1","v1".getBytes());
Object obj=serde.deserialize(row);
assertTrue(obj instanceof LazyAccumuloRow);
LazyAccumuloRow lazyRow=(LazyAccumuloRow)obj;
Object field0=lazyRow.getField(0);
assertNotNull(field0);
assertTrue(field0 instanceof LazyStruct);
LazyStruct struct=(LazyStruct)field0;
List fields=struct.getFieldsAsList();
assertEquals(3,fields.size());
// Each delimited component becomes one LazyString member: p1, p2, p3.
for (int i=0; i < fields.size(); i++) {
assertEquals(LazyString.class,fields.get(i).getClass());
assertEquals("p" + (i + 1),fields.get(i).toString());
}
Object field1=lazyRow.getField(1);
assertNotNull(field1);
assertTrue("Expected instance of LazyString but was " + field1.getClass(),field1 instanceof LazyString);
// assertEquals(expected, actual) — the original had the arguments reversed.
assertEquals("v1",field1.toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void testStructOfMapSerialization() throws IOException, SerDeException {
List columns=Arrays.asList("row","col");
List structColNames=Arrays.asList("map1","map2");
TypeInfo mapTypeInfo=TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.stringTypeInfo,TypeInfoFactory.stringTypeInfo);
List types=Arrays.asList(TypeInfoFactory.getStructTypeInfo(structColNames,Arrays.asList(mapTypeInfo,mapTypeInfo)),TypeInfoFactory.stringTypeInfo);
Properties tableProperties=new Properties();
tableProperties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,":rowid,cf:cq");
tableProperties.setProperty(serdeConstants.LIST_COLUMNS,Joiner.on(',').join(columns));
tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES,Joiner.on(',').join(types));
AccumuloSerDeParameters accumuloSerDeParams=new AccumuloSerDeParameters(new Configuration(),tableProperties,AccumuloSerDe.class.getSimpleName());
LazySerDeParameters serDeParams=accumuloSerDeParams.getSerDeParameters();
byte[] seps=serDeParams.getSeparators();
TypeInfo stringTypeInfo=TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME);
LazyStringObjectInspector stringOI=(LazyStringObjectInspector)LazyFactory.createLazyObjectInspector(stringTypeInfo,new byte[]{0},0,serDeParams.getNullSequence(),serDeParams.isEscaped(),serDeParams.getEscapeChar());
LazyMapObjectInspector mapOI=LazyObjectInspectorFactory.getLazySimpleMapObjectInspector(stringOI,stringOI,seps[3],seps[4],serDeParams.getNullSequence(),serDeParams.isEscaped(),serDeParams.getEscapeChar());
LazySimpleStructObjectInspector rowStructOI=(LazySimpleStructObjectInspector)LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(structColNames,Arrays.asList(mapOI,mapOI),(byte)seps[2],serDeParams.getNullSequence(),serDeParams.isLastColumnTakesRest(),serDeParams.isEscaped(),serDeParams.getEscapeChar());
LazySimpleStructObjectInspector structOI=(LazySimpleStructObjectInspector)LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(columns,Arrays.asList(rowStructOI,stringOI),seps[1],serDeParams.getNullSequence(),serDeParams.isLastColumnTakesRest(),serDeParams.isEscaped(),serDeParams.getEscapeChar());
AccumuloRowSerializer serializer=new AccumuloRowSerializer(0,serDeParams,accumuloSerDeParams.getColumnMappings(),new ColumnVisibility(),accumuloSerDeParams.getRowIdFactory());
Map map1=new HashMap(), map2=new HashMap();
map1.put("key10","value10");
map1.put("key11","value11");
map2.put("key20","value20");
map2.put("key21","value21");
ByteArrayRef byteRef=new ByteArrayRef();
String accumuloRow="key10\5value10\4key11\5value11\3key20\5value20\4key21\5value21";
LazyStruct entireStruct=(LazyStruct)LazyFactory.createLazyObject(structOI);
byteRef.setData((accumuloRow + "\2foo").getBytes());
entireStruct.init(byteRef,0,byteRef.getData().length);
Mutation m=serializer.serialize(entireStruct,structOI);
Assert.assertArrayEquals(accumuloRow.getBytes(),m.getRow());
Assert.assertEquals(1,m.getUpdates().size());
ColumnUpdate update=m.getUpdates().get(0);
Assert.assertEquals("cf",new String(update.getColumnFamily()));
Assert.assertEquals("cq",new String(update.getColumnQualifier()));
Assert.assertEquals("foo",new String(update.getValue()));
AccumuloHiveRow haRow=new AccumuloHiveRow(new String(m.getRow()));
haRow.add("cf","cq","foo".getBytes());
LazyAccumuloRow lazyAccumuloRow=new LazyAccumuloRow(structOI);
lazyAccumuloRow.init(haRow,accumuloSerDeParams.getColumnMappings(),accumuloSerDeParams.getRowIdFactory());
List objects=lazyAccumuloRow.getFieldsAsList();
Assert.assertEquals(2,objects.size());
Assert.assertEquals("foo",objects.get(1).toString());
LazyStruct rowStruct=(LazyStruct)objects.get(0);
List rowObjects=rowStruct.getFieldsAsList();
Assert.assertEquals(2,rowObjects.size());
LazyMap rowMap=(LazyMap)rowObjects.get(0);
Map,?> actualMap=rowMap.getMap();
System.out.println("Actual map 1: " + actualMap);
Map actualStringMap=new HashMap();
for ( Entry,?> entry : actualMap.entrySet()) {
actualStringMap.put(entry.getKey().toString(),entry.getValue().toString());
}
Assert.assertEquals(map1,actualStringMap);
rowMap=(LazyMap)rowObjects.get(1);
actualMap=rowMap.getMap();
System.out.println("Actual map 2: " + actualMap);
actualStringMap=new HashMap();
for ( Entry,?> entry : actualMap.entrySet()) {
actualStringMap.put(entry.getKey().toString(),entry.getValue().toString());
}
Assert.assertEquals(map2,actualStringMap);
}
InternalCallVerifier EqualityVerifier
/**
 * When no visibility label is configured, the serializer must fall back to
 * an empty ColumnVisibility.
 */
@Test public void testNoVisibilitySetsEmptyVisibility() throws SerDeException {
Configuration hadoopConf=new Configuration();
Properties tableProps=new Properties();
tableProps.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,"cf:f1,:rowID");
tableProps.setProperty(serdeConstants.LIST_COLUMNS,"field1,field2");
serde.initialize(hadoopConf,tableProps);
AccumuloRowSerializer rowSerializer=serde.getSerializer();
Assert.assertEquals(new ColumnVisibility(),rowSerializer.getVisibility());
}
InternalCallVerifier EqualityVerifier
/**
 * A visibility label configured via table properties must be propagated to
 * the serializer as an equivalent ColumnVisibility.
 */
@Test public void testColumnVisibilityForSerializer() throws SerDeException {
Configuration hadoopConf=new Configuration();
Properties tableProps=new Properties();
tableProps.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,"cf:f1,:rowID");
tableProps.setProperty(serdeConstants.LIST_COLUMNS,"field1,field2");
tableProps.setProperty(AccumuloSerDeParameters.VISIBILITY_LABEL_KEY,"foobar");
serde.initialize(hadoopConf,tableProps);
AccumuloRowSerializer rowSerializer=serde.getSerializer();
Assert.assertEquals(new ColumnVisibility("foobar"),rowSerializer.getVisibility());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Deserializes a single Accumulo value holding a delimited map
 * ("k1=v1:k2=v2:k3=v3") into a LazyMap and checks every key/value pair.
 */
@Test public void testMapSerialization() throws Exception {
Properties properties=new Properties();
Configuration conf=new Configuration();
properties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,":rowID,cf:vals");
properties.setProperty(serdeConstants.LIST_COLUMNS,"row,values");
// The generic arguments were lost in this file ("string,map"); the value is
// parsed into string keys and string values below, so restore
// map<string,string>.
properties.setProperty(serdeConstants.LIST_COLUMN_TYPES,"string,map<string,string>");
properties.setProperty(serdeConstants.COLLECTION_DELIM,":");
properties.setProperty(serdeConstants.MAPKEY_DELIM,"=");
char collectionSeparator=':', kvSeparator='=';
serde.initialize(conf,properties);
AccumuloHiveRow row=new AccumuloHiveRow();
row.setRowId("r1");
row.add("cf","vals",("k1" + kvSeparator + "v1"+ collectionSeparator+ "k2"+ kvSeparator+ "v2"+ collectionSeparator+ "k3"+ kvSeparator+ "v3").getBytes());
Object obj=serde.deserialize(row);
assertNotNull(obj);
assertTrue(obj instanceof LazyAccumuloRow);
LazyAccumuloRow lazyRow=(LazyAccumuloRow)obj;
Object field0=lazyRow.getField(0);
assertNotNull(field0);
assertTrue(field0 instanceof LazyString);
assertEquals(row.getRowId(),((LazyString)field0).getWritableObject().toString());
Object field1=lazyRow.getField(1);
assertNotNull(field1);
assertTrue(field1 instanceof LazyMap);
LazyMap map=(LazyMap)field1;
// Generic arguments restored (stripped in this file) so the Entry-typed
// for-each below compiles; LazyMap.getMap() returns Map<Object,Object>.
Map<Object,Object> untypedMap=map.getMap();
assertEquals(3,map.getMapSize());
Set<String> expectedKeys=new HashSet<String>();
expectedKeys.add("k1");
expectedKeys.add("k2");
expectedKeys.add("k3");
// Each entry must be LazyString -> LazyString with value "v" + key suffix.
for ( Entry<Object,Object> entry : untypedMap.entrySet()) {
assertNotNull(entry.getKey());
assertTrue(entry.getKey() instanceof LazyString);
LazyString key=(LazyString)entry.getKey();
assertNotNull(entry.getValue());
assertTrue(entry.getValue() instanceof LazyString);
LazyString value=(LazyString)entry.getValue();
String strKey=key.getWritableObject().toString(), strValue=value.getWritableObject().toString();
assertTrue(expectedKeys.remove(strKey));
assertEquals(2,strValue.length());
assertTrue(strValue.startsWith("v"));
assertTrue(strValue.endsWith(Character.toString(strKey.charAt(1))));
}
assertTrue("Did not find expected keys: " + expectedKeys,expectedKeys.isEmpty());
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Deserializes an AccumuloHiveRow with a row id and two populated column
 * values and checks each lazily-materialized field. Only f1 and f2 are
 * populated; the third mapped column (cf:f3) is absent and not asserted.
 */
@Test public void deserialization() throws Exception {
Properties properties=new Properties();
Configuration conf=new Configuration();
properties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,":rowID,cf:f1,cf:f2,cf:f3");
properties.setProperty(serdeConstants.LIST_COLUMNS,"blah,field2,field3,field4");
serde.initialize(conf,properties);
AccumuloHiveRow row=new AccumuloHiveRow();
row.setRowId("r1");
row.add("cf","f1","v1".getBytes());
row.add("cf","f2","v2".getBytes());
Object obj=serde.deserialize(row);
assertTrue(obj instanceof LazyAccumuloRow);
LazyAccumuloRow lazyRow=(LazyAccumuloRow)obj;
Object field0=lazyRow.getField(0);
assertNotNull(field0);
assertTrue(field0 instanceof LazyString);
// Fixed: assertEquals takes (expected, actual); the original reversed the
// arguments, which yields misleading failure messages.
assertEquals("r1",field0.toString());
Object field1=lazyRow.getField(1);
assertNotNull(field1);
assertTrue("Expected instance of LazyString but was " + field1.getClass(),field1 instanceof LazyString);
assertEquals("v1",field1.toString());
Object field2=lazyRow.getField(2);
assertNotNull(field2);
assertTrue(field2 instanceof LazyString);
assertEquals("v2",field2.toString());
}
APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Deserializes a single Accumulo value holding a delimited list
 * ("value1:value2:value3") into a LazyArray and checks each element.
 */
@Test public void testArraySerialization() throws Exception {
Properties properties=new Properties();
Configuration conf=new Configuration();
properties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,":rowID,cf:vals");
properties.setProperty(serdeConstants.LIST_COLUMNS,"row,values");
// The generic argument was lost in this file ("string,array"); the value is
// parsed as a list of strings below, so restore array<string>.
properties.setProperty(serdeConstants.LIST_COLUMN_TYPES,"string,array<string>");
properties.setProperty(serdeConstants.COLLECTION_DELIM,":");
char separator=':';
serde.initialize(conf,properties);
AccumuloHiveRow row=new AccumuloHiveRow();
row.setRowId("r1");
row.add("cf","vals",("value1" + separator + "value2"+ separator+ "value3").getBytes());
Object obj=serde.deserialize(row);
assertNotNull(obj);
assertTrue(obj instanceof LazyAccumuloRow);
LazyAccumuloRow lazyRow=(LazyAccumuloRow)obj;
Object field0=lazyRow.getField(0);
assertNotNull(field0);
assertTrue(field0 instanceof LazyString);
assertEquals(row.getRowId(),((LazyString)field0).getWritableObject().toString());
Object field1=lazyRow.getField(1);
assertNotNull(field1);
assertTrue(field1 instanceof LazyArray);
LazyArray array=(LazyArray)field1;
List values=array.getList();
assertEquals(3,values.size());
// Each delimited component becomes one LazyString element.
for (int i=0; i < 3; i++) {
Object o=values.get(i);
assertNotNull(o);
assertTrue(o instanceof LazyString);
assertEquals("value" + (i + 1),((LazyString)o).getWritableObject().toString());
}
}
Class: org.apache.hadoop.hive.accumulo.serde.TestAccumuloSerDeParameters APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Authorizations set via the configuration key must be parsed into an
 * equivalent Authorizations object.
 */
@Test public void testParseAuthorizationsFromConf() throws SerDeException {
Configuration bareConf=new Configuration(false);
bareConf.set(AccumuloSerDeParameters.AUTHORIZATIONS_KEY,"foo,bar");
Authorizations expected=new Authorizations("foo,bar");
Assert.assertEquals(expected,AccumuloSerDeParameters.getAuthorizationsFromConf(bareConf));
}
InternalCallVerifier EqualityVerifier
/**
 * Authorizations supplied via table properties must be surfaced by
 * AccumuloSerDeParameters.getAuthorizations().
 * NOTE(review): the method name contains a typo ("Fromn"); kept as-is to
 * avoid changing the public test name.
 */
@Test public void testParseAuthorizationsFromnProperties() throws SerDeException {
Configuration hadoopConf=new Configuration();
Properties tableProps=new Properties();
tableProps.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,":rowid,cf:f2,cf:f3");
tableProps.setProperty(serdeConstants.LIST_COLUMNS,"field1,field2,field3");
tableProps.setProperty(serdeConstants.LIST_COLUMN_TYPES,"string,string,string");
tableProps.setProperty(AccumuloSerDeParameters.AUTHORIZATIONS_KEY,"foo,bar");
AccumuloSerDeParameters serDeParams=new AccumuloSerDeParameters(hadoopConf,tableProps,AccumuloSerDe.class.getName());
Assert.assertEquals(new Authorizations("foo,bar"),serDeParams.getAuthorizations());
}
InternalCallVerifier EqualityVerifier
/**
 * A visibility label in the table properties must be parsed into an
 * equivalent ColumnVisibility.
 */
@Test public void testParseColumnVisibility() throws SerDeException {
Properties properties=new Properties();
Configuration conf=new Configuration();
properties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,":rowid,cf:f2,cf:f3");
properties.setProperty(serdeConstants.LIST_COLUMNS,"field1,field2,field3");
// Fixed: the original used serdeConstants.LIST_TYPE_NAME (the literal type
// name "array") as the property key, so the column types were never set.
// Sibling tests use LIST_COLUMN_TYPES for this purpose.
properties.setProperty(serdeConstants.LIST_COLUMN_TYPES,"string,string,string");
properties.setProperty(AccumuloSerDeParameters.VISIBILITY_LABEL_KEY,"foo&bar");
AccumuloSerDeParameters params=new AccumuloSerDeParameters(conf,properties,AccumuloSerDe.class.getName());
ColumnVisibility cv=params.getTableVisibilityLabel();
Assert.assertEquals(new ColumnVisibility("foo&bar"),cv);
}
Class: org.apache.hadoop.hive.accumulo.serde.TestDefaultAccumuloRowIdFactory APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * For complex column types (a struct and a map) the serde must hand back
 * lazy complex inspectors wired with the default nested separators: \2
 * between struct fields / map entries, \3 between map keys and values.
 */
@Test public void testCorrectComplexInspectors() throws SerDeException {
AccumuloSerDe accumuloSerDe=new AccumuloSerDe();
Properties properties=new Properties();
Configuration conf=new Configuration();
properties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,":rowID,cf:cq");
properties.setProperty(serdeConstants.LIST_COLUMNS,"row,col");
// The generic arguments were lost in this file ("struct,map"); restore a
// struct of strings and a string map. The struct's field names/count are
// not asserted below, so any string fields satisfy the test — TODO confirm
// against the original source.
properties.setProperty(serdeConstants.LIST_COLUMN_TYPES,"struct<col1:string,col2:string>,map<string,string>");
accumuloSerDe.initialize(conf,properties);
AccumuloRowIdFactory factory=accumuloSerDe.getParams().getRowIdFactory();
List columnTypes=accumuloSerDe.getParams().getHiveColumnTypes();
ColumnMapper mapper=accumuloSerDe.getParams().getColumnMapper();
LazySerDeParameters serDeParams=accumuloSerDe.getParams().getSerDeParameters();
List OIs=accumuloSerDe.getColumnObjectInspectors(columnTypes,serDeParams,mapper.getColumnMappings(),factory);
Assert.assertEquals(2,OIs.size());
Assert.assertEquals(LazySimpleStructObjectInspector.class,OIs.get(0).getClass());
Assert.assertEquals(LazyMapObjectInspector.class,OIs.get(1).getClass());
LazySimpleStructObjectInspector structOI=(LazySimpleStructObjectInspector)OIs.get(0);
// Second-level separator (\2) between struct fields.
Assert.assertEquals(2,(int)structOI.getSeparator());
LazyMapObjectInspector mapOI=(LazyMapObjectInspector)OIs.get(1);
// \2 between map entries, \3 between each key and its value.
Assert.assertEquals(2,(int)mapOI.getItemSeparator());
Assert.assertEquals(3,(int)mapOI.getKeyValueSeparator());
}
InternalCallVerifier EqualityVerifier
/**
 * For primitive column types the serde must return the matching lazy
 * primitive inspectors (string -> LazyStringObjectInspector, int ->
 * LazyIntObjectInspector).
 */
@Test public void testCorrectPrimitiveInspectors() throws SerDeException {
AccumuloSerDe serDe=new AccumuloSerDe();
Configuration hadoopConf=new Configuration();
Properties tableProps=new Properties();
tableProps.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,":rowID,cf:cq");
tableProps.setProperty(serdeConstants.LIST_COLUMNS,"row,col");
tableProps.setProperty(serdeConstants.LIST_COLUMN_TYPES,"string,int");
serDe.initialize(hadoopConf,tableProps);
AccumuloRowIdFactory rowIdFactory=serDe.getParams().getRowIdFactory();
List hiveColumnTypes=serDe.getParams().getHiveColumnTypes();
ColumnMapper columnMapper=serDe.getParams().getColumnMapper();
LazySerDeParameters lazyParams=serDe.getParams().getSerDeParameters();
List inspectors=serDe.getColumnObjectInspectors(hiveColumnTypes,lazyParams,columnMapper.getColumnMappings(),rowIdFactory);
Assert.assertEquals(2,inspectors.size());
Assert.assertEquals(LazyStringObjectInspector.class,inspectors.get(0).getClass());
Assert.assertEquals(LazyIntObjectInspector.class,inspectors.get(1).getClass());
}
Class: org.apache.hadoop.hive.cli.TestCliSessionState EqualityVerifier
/**
 * A freshly started session must report the metastore's default database
 * name as its current database.
 */
@Test public void testgetDbName() throws Exception {
SessionState.start(new HiveConf());
String currentDb=SessionState.get().getCurrentDatabase();
assertEquals(MetaStoreUtils.DEFAULT_DATABASE_NAME,currentDb);
}
Class: org.apache.hadoop.hive.cli.TestOptionsProcessor BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Verifies that repeated -i options accumulate as init files (in order)
 * and that -f sets the script file name on the session state.
 */
@Test public void testFiles() {
  String[] cliArgs = {"-i", "f1", "-i", "f2", "-f", "fileName"};
  OptionsProcessor optionsProcessor = new OptionsProcessor();
  assertTrue(optionsProcessor.process_stage1(cliArgs));
  CliSessionState state = new CliSessionState(new HiveConf());
  optionsProcessor.process_stage2(state);
  assertEquals("fileName", state.fileName);
  assertEquals(2, state.initFiles.size());
  assertEquals("f1", state.initFiles.get(0));
  assertEquals("f2", state.initFiles.get(1));
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Tests parsing of Hive command-line parameters: -hiveconf sets a system
 * property, -define/-hivevar populate the hive variables map, and the
 * remaining flags land on the session state after stage 2.
 */
@Test public void testOptionsProcessor() {
  OptionsProcessor proc = new OptionsProcessor();
  // Clear any residue left behind by earlier tests before asserting a clean slate.
  System.clearProperty("hiveconf");
  System.clearProperty("define");
  System.clearProperty("hivevar");
  assertNull(System.getProperty("_A"));
  String[] cliArgs = {"-hiveconf", "_A=B", "-define", "C=D", "-hivevar", "X=Y",
      "-S", "true", "-database", "testDb", "-e", "execString", "-v", "true"};
  assertTrue(proc.process_stage1(cliArgs));
  assertEquals("B", System.getProperty("_A"));
  assertEquals("D", proc.getHiveVariables().get("C"));
  assertEquals("Y", proc.getHiveVariables().get("X"));
  CliSessionState state = new CliSessionState(new HiveConf());
  proc.process_stage2(state);
  assertEquals("testDb", state.database);
  assertEquals("execString", state.execString);
  assertEquals(0, state.initFiles.size());
  assertTrue(state.getIsVerbose());
  assertTrue(state.getIsSilent());
}
Class: org.apache.hadoop.hive.cli.TestRCFileCat APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * End-to-end test of the RCFileCat tool: writes a three-row, eight-column
 * RCFile to a temp file, then drives run() with each supported flag while
 * capturing stdout/stderr, and finally with bad arguments to check the
 * usage message.
 */
@Test public void testRCFileCat() throws Exception {
File template=File.createTempFile("hive","tmpTest");
Configuration configuration=new Configuration();
// Three 8-column rows; column 6 is empty and column 7 holds literal text.
byte[][] record_1={Bytes.toBytes("123"),Bytes.toBytes("456"),Bytes.toBytes("789"),Bytes.toBytes("1000"),Bytes.toBytes("5.3"),Bytes.toBytes("hive and hadoop"),new byte[0],Bytes.toBytes("NULL")};
byte[][] record_2={Bytes.toBytes("100"),Bytes.toBytes("200"),Bytes.toBytes("123"),Bytes.toBytes("1000"),Bytes.toBytes("5.3"),Bytes.toBytes("hive and hadoop"),new byte[0],Bytes.toBytes("NULL")};
byte[][] record_3={Bytes.toBytes("200"),Bytes.toBytes("400"),Bytes.toBytes("678"),Bytes.toBytes("1000"),Bytes.toBytes("4.8"),Bytes.toBytes("hive and hadoop"),new byte[0],Bytes.toBytes("TEST")};
RCFileOutputFormat.setColumnNumber(configuration,8);
Path file=new Path(template.getAbsolutePath());
FileSystem fs=FileSystem.getLocal(configuration);
RCFile.Writer writer=new RCFile.Writer(fs,configuration,file,null,RCFile.createMetadata(new Text("apple"),new Text("block"),new Text("cat"),new Text("dog")),new DefaultCodec());
write(writer,record_1);
write(writer,record_2);
write(writer,record_3);
writer.close();
RCFileCat fileCat=new RCFileCat();
RCFileCat.test=true;
fileCat.setConf(new Configuration());
// Redirect stdout/stderr so the tool's printed output can be asserted on;
// restored in the finally block below.
PrintStream oldOutPrintStream=System.out;
PrintStream oldErrPrintStream=System.err;
ByteArrayOutputStream dataOut=new ByteArrayOutputStream();
ByteArrayOutputStream dataErr=new ByteArrayOutputStream();
System.setOut(new PrintStream(dataOut));
System.setErr(new PrintStream(dataErr));
try {
// --verbose: all three rows are printed tab-separated.
String[] params={"--verbose","file://" + template.toURI().getPath()};
assertEquals(0,fileCat.run(params));
assertTrue(dataOut.toString().contains("123\t456\t789\t1000\t5.3\thive and hadoop\t\tNULL"));
assertTrue(dataOut.toString().contains("100\t200\t123\t1000\t5.3\thive and hadoop\t\tNULL"));
assertTrue(dataOut.toString().contains("200\t400\t678\t1000\t4.8\thive and hadoop\t\tTEST"));
dataOut.reset();
// --file-sizes (with a negative start offset) prints the file summary.
params=new String[]{"--start=-10","--file-sizes","file://" + template.toURI().getPath()};
assertEquals(0,fileCat.run(params));
assertTrue(dataOut.toString().contains("File size (uncompressed): 105. File size (compressed): 134. Number of rows: 3."));
dataOut.reset();
// --column-sizes: per-column uncompressed/compressed sizes, tab-separated.
params=new String[]{"--start=0","--column-sizes","file://" + template.toURI().getPath()};
assertEquals(0,fileCat.run(params));
assertTrue(dataOut.toString().contains("0\t9\t17"));
assertTrue(dataOut.toString().contains("1\t9\t17"));
assertTrue(dataOut.toString().contains("2\t9\t17"));
assertTrue(dataOut.toString().contains("3\t12\t14"));
assertTrue(dataOut.toString().contains("4\t9\t17"));
assertTrue(dataOut.toString().contains("5\t45\t26"));
dataOut.reset();
// --column-sizes-pretty: same data in human-readable form.
params=new String[]{"--start=0","--column-sizes-pretty","file://" + template.toURI().getPath()};
assertEquals(0,fileCat.run(params));
assertTrue(dataOut.toString().contains("Column 0: Uncompressed size: 9 Compressed size: 17"));
assertTrue(dataOut.toString().contains("Column 1: Uncompressed size: 9 Compressed size: 17"));
assertTrue(dataOut.toString().contains("Column 2: Uncompressed size: 9 Compressed size: 17"));
assertTrue(dataOut.toString().contains("Column 3: Uncompressed size: 12 Compressed size: 14"));
assertTrue(dataOut.toString().contains("Column 4: Uncompressed size: 9 Compressed size: 17"));
assertTrue(dataOut.toString().contains("Column 5: Uncompressed size: 45 Compressed size: 26"));
// Missing file argument: non-zero exit and the usage message on stderr.
params=new String[]{};
assertEquals(-1,fileCat.run(params));
assertTrue(dataErr.toString().contains("RCFileCat [--start=start_offet] [--length=len] [--verbose] " + "[--column-sizes | --column-sizes-pretty] [--file-sizes] fileName"));
dataErr.reset();
// Unknown flag: same failure mode.
params=new String[]{"--fakeParameter","file://" + template.toURI().getPath()};
assertEquals(-1,fileCat.run(params));
assertTrue(dataErr.toString().contains("RCFileCat [--start=start_offet] [--length=len] [--verbose] " + "[--column-sizes | --column-sizes-pretty] [--file-sizes] fileName"));
}
 finally {
System.setOut(oldOutPrintStream);
System.setErr(oldErrPrintStream);
}
}
Class: org.apache.hadoop.hive.common.TestValidReadTxnList InternalCallVerifier EqualityVerifier
/**
 * Round-trips a serialized ValidTxnList through a Hadoop Configuration
 * that is written to and read back from a temp file.
 */
@Test public void readWriteConfig() throws Exception {
  long[] exceptions = new long[1000];
  for (int i = 0; i < 1000; i++) {
    exceptions[i] = i + 100;
  }
  ValidTxnList txnList = new ValidReadTxnList(exceptions, 2000);
  String str = txnList.writeToString();
  Configuration conf = new Configuration();
  conf.set(ValidTxnList.VALID_TXNS_KEY, str);
  File tmpFile = File.createTempFile("TestValidTxnImpl", "readWriteConfig");
  tmpFile.deleteOnExit(); // the original left the temp file behind
  // try-with-resources: the original never closed the input stream and
  // leaked the output stream if conf.write() threw.
  try (DataOutputStream out = new DataOutputStream(new FileOutputStream(tmpFile))) {
    conf.write(out);
  }
  Configuration newConf = new Configuration();
  try (DataInputStream in = new DataInputStream(new FileInputStream(tmpFile))) {
    newConf.readFields(in);
  }
  Assert.assertEquals(str, newConf.get(ValidTxnList.VALID_TXNS_KEY));
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * "5:2:4" encodes high-water-mark 5 with exception txns 2 and 4; after a
 * round trip, only the exceptions (and txns beyond the mark) are invalid.
 */
@Test public void exceptions() throws Exception {
  String serialized = new ValidReadTxnList(new long[]{2L, 4L}, 5).writeToString();
  Assert.assertEquals("5:2:4", serialized);
  ValidTxnList parsed = new ValidReadTxnList();
  parsed.readFromString(serialized);
  long[] validTxns = {1, 3, 5};
  for (long txn : validTxns) {
    Assert.assertTrue(parsed.isTxnValid(txn));
  }
  long[] invalidTxns = {2, 4, 6};
  for (long txn : invalidTxns) {
    Assert.assertFalse(parsed.isTxnValid(txn));
  }
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * With an empty exception list the serialized form is just the
 * high-water mark followed by a colon.
 */
@Test public void noExceptions() throws Exception {
  String serialized = new ValidReadTxnList(new long[0], 1).writeToString();
  Assert.assertEquals("1:", serialized);
  ValidTxnList parsed = new ValidReadTxnList();
  parsed.readFromString(serialized);
  Assert.assertTrue(parsed.isTxnValid(1));   // at the high-water mark
  Assert.assertFalse(parsed.isTxnValid(2));  // beyond it
}
Class: org.apache.hadoop.hive.common.metrics.TestLegacyMetrics APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Runs testScopeImpl from several threads concurrently and checks that
 * the stored scope's counters aggregate across all of them (the asserted
 * totals imply three closures per testScopeImpl run).
 */
@Test public void testScopeConcurrency() throws Exception {
  metrics.startStoredScope(scopeName);
  LegacyMetrics.LegacyMetricsScope fooScope =
      (LegacyMetrics.LegacyMetricsScope) metrics.getStoredScope(scopeName);
  final int threads = 10;
  ExecutorService executorService = Executors.newFixedThreadPool(threads);
  for (int i = 0; i < threads; i++) {
    final int n = i;
    // Parameterized Callable<Void> instead of the raw type used originally.
    executorService.submit(new Callable<Void>() {
      @Override public Void call() throws Exception {
        testScopeImpl(n);
        return null;
      }
    });
  }
  executorService.shutdown();
  assertTrue(executorService.awaitTermination(periodMs * 3 * threads, TimeUnit.MILLISECONDS));
  fooScope = (LegacyMetrics.LegacyMetricsScope) metrics.getStoredScope(scopeName);
  assertEquals(Long.valueOf(3 * threads), fooScope.getNumCounter());
  assertTrue(fooScope.getTimeCounter().longValue() > 3 * periodMs * threads);
  Double avgT = (Double) metrics.get("foo.avg_t");
  assertTrue(avgT.doubleValue() > periodMs);
  metrics.endStoredScope(scopeName);
}
APIUtilityVerifier BranchVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Checks the registered MetricsMBean: its class name, the presence of a
 * "reset" operation, dynamic attribute registration via setAttribute,
 * and that invoking reset() zeroes the stored attribute value.
 */
@Test public void testMetricsMBean() throws Exception {
  MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
  final ObjectName oname = new ObjectName("org.apache.hadoop.hive.common.metrics:type=MetricsMBean");
  MBeanInfo mBeanInfo = mbs.getMBeanInfo(oname);
  assertEquals(MetricsMBeanImpl.class.getName(), mBeanInfo.getClassName());
  // The MBean must expose a "reset" operation.
  MBeanOperationInfo[] oops = mBeanInfo.getOperations();
  boolean resetFound = false;
  for (MBeanOperationInfo op : oops) {
    if ("reset".equals(op.getName())) {
      resetFound = true;
      break;
    }
  }
  assertTrue(resetFound);
  // Setting an attribute registers it dynamically on the MBean.
  Attribute attr = new Attribute("fooMetric", Long.valueOf(-77));
  mbs.setAttribute(oname, attr);
  mBeanInfo = mbs.getMBeanInfo(oname);
  // Renamed from the original's misspelled "attrinuteInfos".
  MBeanAttributeInfo[] attributeInfos = mBeanInfo.getAttributes();
  assertEquals(1, attributeInfos.length);
  boolean attrFound = false;
  for (MBeanAttributeInfo info : attributeInfos) {
    if ("fooMetric".equals(info.getName())) {
      assertEquals("java.lang.Long", info.getType());
      assertTrue(info.isReadable());
      assertTrue(info.isWritable());
      assertFalse(info.isIs());
      attrFound = true;
      break;
    }
  }
  assertTrue(attrFound);
  Object v = mbs.getAttribute(oname, "fooMetric");
  assertEquals(Long.valueOf(-77), v);
  // reset() returns null and zeroes the stored metric.
  Object result = mbs.invoke(oname, "reset", new Object[0], new String[0]);
  assertNull(result);
  v = mbs.getAttribute(oname, "fooMetric");
  assertEquals(Long.valueOf(0), v);
}
BooleanVerifier InternalCallVerifier IdentityVerifier EqualityVerifier HybridVerifier
/**
 * Single-threaded life cycle of a stored metrics scope: counters are
 * unreadable while the scope is open, double open/end/start raises
 * IOException (checked via expectIOE), and each closure bumps the number
 * counter and accumulates elapsed time.
 */
@Test public void testScopeSingleThread() throws Exception {
metrics.startStoredScope(scopeName);
final LegacyMetrics.LegacyMetricsScope fooScope=(LegacyMetrics.LegacyMetricsScope)metrics.getStoredScope(scopeName);
// While the scope is open, reading either counter must fail.
expectIOE(new Callable(){
@Override public Long call() throws Exception {
Long num=fooScope.getNumCounter();
return num;
}
}
);
expectIOE(new Callable(){
@Override public Long call() throws Exception {
Long time=fooScope.getTimeCounter();
return time;
}
}
);
// Re-opening an already-open scope must fail.
expectIOE(new Callable(){
@Override public Void call() throws Exception {
fooScope.open();
return null;
}
}
);
assertSame(fooScope,metrics.getStoredScope(scopeName));
Thread.sleep(periodMs + 1);
metrics.endStoredScope(scopeName);
// Ending an already-ended scope must fail.
expectIOE(new Callable(){
@Override public Void call() throws Exception {
metrics.endStoredScope(scopeName);
return null;
}
}
);
// One closure so far; elapsed time covers at least one sleep period.
assertEquals(Long.valueOf(1),fooScope.getNumCounter());
final long t1=fooScope.getTimeCounter().longValue();
assertTrue(t1 > periodMs);
assertSame(fooScope,metrics.getStoredScope(scopeName));
metrics.startStoredScope(scopeName);
// Starting an already-started scope must fail.
expectIOE(new Callable(){
@Override public Void call() throws Exception {
metrics.startStoredScope(scopeName);
return null;
}
}
);
// Counters are unchanged until the scope is closed again.
assertEquals(Long.valueOf(1),fooScope.getNumCounter());
assertEquals(t1,fooScope.getTimeCounter().longValue());
assertSame(fooScope,metrics.getStoredScope(scopeName));
Thread.sleep(periodMs + 1);
// reopen() adds one closure (the counter goes 1 -> 2).
fooScope.reopen();
assertEquals(Long.valueOf(2),fooScope.getNumCounter());
assertTrue(fooScope.getTimeCounter().longValue() > 2 * periodMs);
Thread.sleep(periodMs + 1);
fooScope.close();
assertEquals(Long.valueOf(3),fooScope.getNumCounter());
assertTrue(fooScope.getTimeCounter().longValue() > 3 * periodMs);
// Average time per closure should exceed one sleep period.
Double avgT=(Double)metrics.get("foo.avg_t");
assertTrue(avgT.doubleValue() > periodMs);
}
Class: org.apache.hadoop.hive.common.metrics.metrics2.TestCodahaleMetrics APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Increments a counter several times and waits for the JSON file
 * reporter to flush, then verifies the count written to the report file.
 */
@Test public void testFileReporting() throws Exception {
  int runs = 5;
  for (int i = 0; i < runs; i++) {
    MetricsFactory.getInstance().incrementCounter("count2");
    Thread.sleep(100);
  }
  // Give the periodic reporter time to write the file.
  Thread.sleep(2000);
  byte[] jsonData = Files.readAllBytes(Paths.get(jsonReportFile.getAbsolutePath()));
  ObjectMapper objectMapper = new ObjectMapper();
  JsonNode rootNode = objectMapper.readTree(jsonData);
  JsonNode countNode = rootNode.path("counters").path("count2").path("count");
  // Expected value first (the original had expected and actual swapped),
  // and tied to the loop bound instead of a repeated literal.
  Assert.assertEquals(runs, countNode.asInt());
}
InternalCallVerifier EqualityVerifier
/**
 * Counter increments are reflected directly in the metric registry.
 */
@Test public void testCount() throws Exception {
  final int increments = 5;
  for (int i = 0; i < increments; i++) {
    MetricsFactory.getInstance().incrementCounter("count1");
  }
  Counter counter = metricRegistry.getCounters().get("count1");
  Assert.assertEquals(5L, counter.getCount());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * A registered gauge tracks its backing variable: the value appears in
 * the JSON report after a flush, and a later change to the variable is
 * visible after the next flush.
 */
@Test public void testGauge() throws Exception {
  TestMetricsVariable testVar = new TestMetricsVariable();
  testVar.setValue(20);
  MetricsFactory.getInstance().addGauge("gauge1", testVar);
  Thread.sleep(2000);  // wait for the JSON reporter to flush
  ObjectMapper objectMapper = new ObjectMapper();
  byte[] jsonData = Files.readAllBytes(Paths.get(jsonReportFile.getAbsolutePath()));
  JsonNode valueNode = objectMapper.readTree(jsonData).path("gauges").path("gauge1").path("value");
  // Expected value first (the original had expected and actual swapped).
  Assert.assertEquals(testVar.getValue(), valueNode.asInt());
  testVar.setValue(40);
  Thread.sleep(2000);
  jsonData = Files.readAllBytes(Paths.get(jsonReportFile.getAbsolutePath()));
  valueNode = objectMapper.readTree(jsonData).path("gauges").path("gauge1").path("value");
  Assert.assertEquals(testVar.getValue(), valueNode.asInt());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Each start/end pair of a stored scope records one event on the
 * corresponding registry timer.
 */
@Test public void testScope() throws Exception {
  final int iterations = 5;
  for (int i = 0; i < iterations; i++) {
    MetricsFactory.getInstance().startStoredScope("method1");
    MetricsFactory.getInstance().endStoredScope("method1");
  }
  Timer timer = metricRegistry.getTimers().get("api_method1");
  Assert.assertEquals(5, timer.getCount());
  Assert.assertTrue(timer.getMeanRate() > 0);
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Concurrent start/end of the same stored scope from multiple threads:
 * every invocation must be recorded by the shared timer.
 */
@Test public void testConcurrency() throws Exception {
  int threads = 4;
  ExecutorService executorService = Executors.newFixedThreadPool(threads);
  for (int i = 0; i < threads; i++) {
    // Callable<Void> instead of the raw type; the original also captured
    // the loop index in an unused local, which is dropped here.
    executorService.submit(new Callable<Void>() {
      @Override public Void call() throws Exception {
        MetricsFactory.getInstance().startStoredScope("method2");
        MetricsFactory.getInstance().endStoredScope("method2");
        return null;
      }
    });
  }
  executorService.shutdown();
  assertTrue(executorService.awaitTermination(10000, TimeUnit.MILLISECONDS));
  Timer timer = metricRegistry.getTimers().get("api_method2");
  Assert.assertEquals(4, timer.getCount());
  Assert.assertTrue(timer.getMeanRate() > 0);
}
Class: org.apache.hadoop.hive.common.type.TestDecimal128 InternalCallVerifier EqualityVerifier
/**
 * Subtraction in both argument orders, with mixed scales, plus a
 * destructive-add sanity check.
 */
@Test public void testSubtract() {
  Decimal128 result = new Decimal128();
  Decimal128.subtract(one, two, result, (short) 2);
  assertEquals(0, new Decimal128(-1L, (short) 0).compareTo(result));
  Decimal128.subtract(two, one, result, (short) 2);
  assertEquals(0, new Decimal128(1L, (short) 0).compareTo(result));
  // x - x must be exactly zero with signum 0.
  Decimal128.subtract(two, two, result, (short) 1);
  assertEquals(0, zero.compareTo(result));
  assertEquals(0, result.getSignum());
  long bigOperand = 123456789012345L;
  long smallOperand = 987654321097L;
  long expectedDiff = bigOperand - smallOperand;
  Decimal128 lhs = new Decimal128(bigOperand, (short) 3);
  Decimal128 rhs = new Decimal128(smallOperand, (short) 5);
  Decimal128.subtract(lhs, rhs, result, (short) 2);
  assertEquals(0, new Decimal128(expectedDiff, (short) 0).compareTo(result));
  Decimal128.subtract(rhs, lhs, result, (short) 2);
  assertEquals(0, new Decimal128(-expectedDiff, (short) 0).compareTo(result));
  Decimal128 accumulator = new Decimal128("1.123", (short) 3);
  accumulator.addDestructive(new Decimal128("4.321", (short) 3), (short) 3);
  assertEquals("5.444", accumulator.toFormalString());
}
InternalCallVerifier EqualityVerifier
/**
 * Division results and rounding behavior at several result scales.
 */
@Test public void testDivide() {
  Decimal128 quotient = new Decimal128();
  Decimal128.divide(two, one, quotient, (short) 2);
  assertEquals(0, quotient.compareTo(two));
  Decimal128.divide(two, two, quotient, (short) 2);
  assertEquals(0, quotient.compareTo(one));
  Decimal128 three = new Decimal128(3);
  Decimal128 four = new Decimal128(4);
  // 3/4 rounded at progressively smaller scales.
  Decimal128.divide(three, four, quotient, (short) 2);
  assertEquals("0.75", quotient.toFormalString());
  Decimal128.divide(three, four, quotient, (short) 1);
  assertEquals("0.8", quotient.toFormalString());
  Decimal128.divide(three, four, quotient, (short) 0);
  assertEquals("1", quotient.toFormalString());
  // Renamed from "two": the original declared a local that shadowed the
  // shared "two" field used earlier in this very method.
  Decimal128 localTwo = new Decimal128(2);
  Decimal128.divide(localTwo, three, quotient, (short) 4);
  assertEquals("0.6667", quotient.toFormalString());
}
InternalCallVerifier EqualityVerifier
/**
 * update() with an explicit target scale preserves the numeric value:
 * the updated copy compares equal to its source.
 */
@Test public void testUpdateWithScale() {
  Decimal128 source = new Decimal128(1234.123, (short) 4);
  Decimal128 target = new Decimal128(0, (short) 3);
  target.update(source, (short) 3);
  assertEquals(0, source.compareTo(target));
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * hashCode(): distinct fixture values hash differently, equal values
 * hash identically, and the hash ignores the scale representation.
 */
@Test public void testHashCode() {
  assertTrue(one.hashCode() != two.hashCode());
  assertTrue(zero.hashCode() != one.hashCode());
  assertTrue(zero.hashCode() != two.hashCode());
  assertEquals(zero.hashCode(), new Decimal128(0).hashCode());
  assertEquals(one.hashCode(), new Decimal128(1).hashCode());
  assertEquals(two.hashCode(), new Decimal128(2).hashCode());
  // A rescaled one is still numerically one, so the hash must agree.
  Decimal128 rescaledOne = new Decimal128(1L, (short) 3);
  rescaledOne.changeScaleDestructive((short) 0);
  assertEquals(one.hashCode(), rescaledOne.hashCode());
}
InternalCallVerifier EqualityVerifier
/**
 * getHiveDecimalString() must match HiveDecimal's canonical rendering:
 * trailing zeros trimmed, no superfluous leading zeros. The repetitive
 * original is decomposed into two assertion helpers below.
 */
@Test public void testToHiveDecimalString() {
  assertHiveDecimalString("4134.923076923077", "4134.923076923077", (short) 15);
  assertHiveDecimalString("0.00923076923", "0.00923076923", (short) 15);
  // Trailing zeros are dropped.
  assertHiveDecimalString("0.00923076", "0.00923076000", (short) 15);
  assertHiveDecimalString("4294967296.01", "4294967296.01", (short) 15);
  assertHiveDecimalString("4294967296.01", "4294967296.01", (short) 2);
  assertMatchesHiveDecimal(HiveDecimal.create(new BigInteger("42949672")));
  assertMatchesHiveDecimal(HiveDecimal.create(new BigDecimal("0.0")));
  assertMatchesHiveDecimal(HiveDecimal.create(new BigDecimal("0.00023000")));
  assertMatchesHiveDecimal(HiveDecimal.create(new BigDecimal("0.1")));
  assertMatchesHiveDecimal(HiveDecimal.create(new BigDecimal("-00.100")));
  assertMatchesHiveDecimal(HiveDecimal.create(new BigDecimal("00.1")));
  // Whole numbers built with a non-zero scale still render without a
  // fractional part.
  Decimal128 wholeFromDouble = new Decimal128(27.000, (short) 3);
  assertEquals(HiveDecimal.create(new BigDecimal("27.000")).toString(),
      wholeFromDouble.getHiveDecimalString());
  assertEquals("27", wholeFromDouble.getHiveDecimalString());
  Decimal128 wholeFromLong = new Decimal128(1234123000, (short) 3);
  assertEquals(HiveDecimal.create(new BigDecimal("1234123000")).toString(),
      wholeFromLong.getHiveDecimalString());
  assertEquals("1234123000", wholeFromLong.getHiveDecimalString());
}

/** Asserts that parsing {@code input} at {@code scale} renders as {@code expected}. */
private void assertHiveDecimalString(String expected, String input, short scale) {
  assertEquals(expected, new Decimal128(input, scale).getHiveDecimalString());
}

/** Asserts that a Decimal128 updated from {@code hd} renders exactly like {@code hd}. */
private void assertMatchesHiveDecimal(HiveDecimal hd) {
  Decimal128 dec = new Decimal128();
  dec.update(hd.bigDecimalValue());
  assertEquals(hd.toString(), dec.getHiveDecimalString());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * equals(): distinct fixture values differ, equal values match, and the
 * scale representation does not affect equality.
 */
@Test public void testEquals() {
  // Different values are never equal.
  assertTrue(!one.equals(two));
  assertTrue(!zero.equals(one));
  assertTrue(!zero.equals(two));
  // Same numeric value compares equal.
  assertEquals(zero, new Decimal128(0));
  assertEquals(one, new Decimal128(1));
  assertEquals(two, new Decimal128(2));
  // Rescaling does not break equality.
  Decimal128 rescaledOne = new Decimal128(1L, (short) 3);
  rescaledOne.changeScaleDestructive((short) 0);
  assertEquals(one, rescaledOne);
}
InternalCallVerifier EqualityVerifier
/**
 * longValue() truncates the fraction and stays exact around the 2^32
 * boundary in both signs; it also agrees with HiveDecimal's truncation.
 */
@Test public void testToLong() {
  Decimal128 dec = new Decimal128("1.25", (short) 2);
  assertEquals(1, dec.longValue());
  dec.update("4294967295", (short) 0);
  assertEquals(4294967295L, dec.longValue());
  dec.update("4294967296", (short) 0);
  assertEquals(4294967296L, dec.longValue());
  dec.update("-4294967295", (short) 0);
  assertEquals(-4294967295L, dec.longValue());
  dec.update("-4294967296", (short) 0);
  assertEquals(-4294967296L, dec.longValue());
  dec.update("4294967295.01", (short) 2);
  assertEquals(4294967295L, dec.longValue());
  dec.update("4294967296.01", (short) 2);
  assertEquals(4294967296L, dec.longValue());
  dec.update(37.678, (short) 5);
  HiveDecimal reference = HiveDecimal.create(BigDecimal.valueOf(37.678));
  assertEquals(reference.longValue(), dec.longValue());
}
InternalCallVerifier EqualityVerifier
/**
 * floatValue() of ±3/9 computed at maximum scale is approximately ±1/3.
 */
@Test public void testFloatValue() {
  Decimal128 quotient = new Decimal128();
  Decimal128 three = new Decimal128(3);
  // Renamed from the original's misleading "four" — the value is 9.
  Decimal128 nine = new Decimal128(9);
  Decimal128.divide(three, nine, quotient, (short) 38);
  assertEquals(0.3333333333333333f, quotient.floatValue(), 0.00000000001f);
  Decimal128 minusThree = new Decimal128(-3);
  Decimal128.divide(minusThree, nine, quotient, (short) 38);
  assertEquals(-0.333333333333333f, quotient.floatValue(), 0.00000000001f);
}
EqualityVerifier
/**
 * sqrtAsDouble() matches the double-precision square root to ~1e-15 for
 * values slightly above one.
 */
@Test public void testSqrtAsDouble() {
  Decimal128 nearOneA = new Decimal128("1.00435134913958923485982394892384", (short) 36);
  Decimal128 nearOneB = new Decimal128("1.00345982739817298323423423", (short) 36);
  assertEquals(1.00217331292526d, nearOneA.sqrtAsDouble(), 0.000000000000001d);
  assertEquals(1.00172841998127d, nearOneB.sqrtAsDouble(), 0.000000000000001d);
}
EqualityVerifier
/**
 * Addition is correct and commutative, including mixed-scale operands.
 */
@Test public void testAdd() {
  Decimal128 result = new Decimal128();
  Decimal128.add(one, two, result, (short) 2);
  assertEquals(0, new Decimal128(3L, (short) 0).compareTo(result));
  Decimal128.add(two, two, result, (short) 1);
  assertEquals(0, new Decimal128(4L, (short) 0).compareTo(result));
  long bigOperand = 123456789012345L;
  long smallOperand = 987654321097L;
  long expectedSum = bigOperand + smallOperand;
  Decimal128 lhs = new Decimal128(bigOperand, (short) 3);
  Decimal128 rhs = new Decimal128(smallOperand, (short) 5);
  Decimal128.add(lhs, rhs, result, (short) 2);
  assertEquals(0, new Decimal128(expectedSum, (short) 0).compareTo(result));
  // Commutativity: swapping the operands yields the same sum.
  Decimal128.add(rhs, lhs, result, (short) 2);
  assertEquals(0, new Decimal128(expectedSum, (short) 0).compareTo(result));
}
EqualityVerifier
/**
 * powAsDouble() matches double-precision exponentiation to ~1e-15.
 */
@Test public void testPowAsDouble() {
  Decimal128 base1 = new Decimal128("1.00435134913958923485982394892384", (short) 36);
  assertEquals(1.004366436877081d,
      base1.powAsDouble(1.00345982739817298323423423d), 0.000000000000001d);
  Decimal128 base2 = new Decimal128("1.001", (short) 36);
  assertEquals(1.0100451202102512d, base2.powAsDouble(10), 0.000000000000001d);
}
InternalCallVerifier EqualityVerifier
/**
 * 10^37 is representable in Decimal128 and renders with all 37 zeros.
 */
@Test public void testCalculateTenThirtySeven() {
  Decimal128 ten = new Decimal128(10, (short) 0);
  Decimal128 val = new Decimal128(1, (short) 0);
  for (int i = 0; i < 37; ++i) {
    val.multiplyDestructive(ten, (short) 0);
  }
  // The original also declared an unused "boolean overflow" local,
  // removed here.
  assertEquals("10000000000000000000000000000000000000", val.toFormalString());
}
InternalCallVerifier EqualityVerifier
/**
 * doubleValue() of ±3/9 computed at maximum scale is approximately ±1/3
 * to 25 decimal places.
 */
@Test public void testDoubleValue() {
  Decimal128 quotient = new Decimal128();
  Decimal128 three = new Decimal128(3);
  // Renamed from the original's misleading "four" — the value is 9.
  Decimal128 nine = new Decimal128(9);
  Decimal128.divide(three, nine, quotient, (short) 38);
  assertEquals(0.33333333333333333333333333d, quotient.doubleValue(), 0.0000000000000000000000001d);
  Decimal128 minusThree = new Decimal128(-3);
  Decimal128.divide(minusThree, nine, quotient, (short) 38);
  assertEquals(-0.33333333333333333333333333d, quotient.doubleValue(), 0.0000000000000000000000001d);
}
InternalCallVerifier EqualityVerifier
/**
 * Multiplication: identity and small products, a 128-bit unscaled
 * product cross-checked against UnsignedInt128, double-precision
 * accuracy at two scales, and destructive multiply with scale expansion.
 */
@Test public void testMultiply(){
Decimal128 result=new Decimal128();
Decimal128.multiply(one,two,result,(short)2);
assertEquals(0,two.compareTo(result));
Decimal128.multiply(two,two,result,(short)2);
assertEquals(0,new Decimal128(4L,(short)0).compareTo(result));
// Large product: the expected unscaled value is built independently via
// UnsignedInt128 multiplication, and the product must be commutative.
long l1=123456789012345L;
long l2=987654321097L;
Decimal128 left=new Decimal128(l1,(short)0);
Decimal128 right=new Decimal128(l2,(short)0);
UnsignedInt128 unscaled=new UnsignedInt128(l1).multiplyConstructive(new UnsignedInt128(l2));
Decimal128 ans=new Decimal128(unscaled,(short)0,false);
Decimal128.multiply(left,right,result,(short)0);
assertEquals(0,ans.compareTo(result));
Decimal128.multiply(right,left,result,(short)0);
assertEquals(0,ans.compareTo(result));
// At scale 10 the product is close to the double result but not exact.
Decimal128.multiply(new Decimal128(1.123d,(short)10),new Decimal128(4.321d,(short)10),result,(short)10);
assertEquals(1.123d * 4.321d,result.doubleValue(),0.00001d);
assertNotEquals(1.123d * 4.321d,result.doubleValue(),0.00000000000000001d);
// At scale 2 the rounding error is larger but still within 1.0.
Decimal128.multiply(new Decimal128(1.123d,(short)2),new Decimal128(4.321d,(short)2),result,(short)2);
assertEquals(1.123d * 4.321d,result.doubleValue(),1.0d);
assertNotEquals(1.123d * 4.321d,result.doubleValue(),0.000001d);
// Destructive multiply expands the scale of the receiver.
Decimal128 val=new Decimal128("1.123",(short)3);
val.multiplyDestructive(new Decimal128("4.321",(short)3),(short)6);
assertEquals("4.852483",val.toFormalString());
Decimal128 val1=new Decimal128("1.0001",(short)4);
val1.multiplyDestructive(new Decimal128("1.0001",(short)4),(short)8);
assertEquals("1.00020001",val1.toFormalString());
}
InternalCallVerifier EqualityVerifier
/**
 * toFormalString() rendering and string-constructor round trips: scale
 * padding with trailing zeros, truncation to a smaller scale, and
 * scientific-notation input.
 */
@Test public void testText(){
assertEquals("1",one.toFormalString());
assertEquals(0,new Decimal128("1",(short)0).compareTo(one));
assertEquals("2",two.toFormalString());
assertEquals(0,new Decimal128("2",(short)0).compareTo(two));
assertEquals("0",zero.toFormalString());
assertEquals(0,new Decimal128("0",(short)0).compareTo(zero));
// A non-zero scale pads the rendering with trailing zeros.
assertEquals("1.000",new Decimal128(1L,(short)3).toFormalString());
assertEquals(0,new Decimal128("1",(short)3).compareTo(one));
assertEquals("2.000000",new Decimal128(2.0d,(short)6).toFormalString());
assertEquals("2.000",new Decimal128(2.0d,(short)3).toFormalString());
assertEquals(0,new Decimal128("2.0",(short)6).compareTo(two));
assertEquals(0,new Decimal128("2.0",(short)3).compareTo(two));
// A larger scale pads; a smaller scale truncates the fraction.
assertEquals("1.3330",new Decimal128("1.333",(short)4).toFormalString());
assertEquals("1.333000",new Decimal128("1.333",(short)6).toFormalString());
assertEquals("1.333",new Decimal128("1.333",(short)3).toFormalString());
assertEquals("1.33",new Decimal128("1.333",(short)2).toFormalString());
assertEquals("1.33",new Decimal128("1.333",(short)2).toFormalString());
// Scientific-notation input is accepted.
assertEquals("0.13330",new Decimal128("1333E-4",(short)5).toFormalString());
assertEquals("0.01333",new Decimal128("1333E-5",(short)5).toFormalString());
assertEquals("13330000.00",new Decimal128("1333E4",(short)2).toFormalString());
assertEquals("123456789012345678901234.56789",new Decimal128("123456789012345678901234567.8901234E-3",(short)5).toFormalString());
}
Class: org.apache.hadoop.hive.common.type.TestHiveChar InternalCallVerifier EqualityVerifier
/**
 * Basic HiveChar behavior: toString()/getPaddedValue() pad to the
 * declared length, getStrippedValue() trims the padding, and
 * getCharacterLength() counts the unpadded characters.
 */
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testBasic(){
HiveChar hc=new HiveChar("abc",10);
assertEquals("abc ",hc.toString());
assertEquals("abc ",hc.getPaddedValue());
assertEquals("abc",hc.getStrippedValue());
assertEquals(3,hc.getCharacterLength());
// A value that exactly fills its length needs no padding.
hc.setValue("abc123");
assertEquals("abc123",hc.toString());
assertEquals("abc123",hc.getPaddedValue());
assertEquals("abc123",hc.getStrippedValue());
assertEquals(6,hc.getCharacterLength());
// setValue with an explicit max length re-pads to that length.
hc.setValue("xyz",15);
assertEquals("xyz ",hc.toString());
assertEquals("xyz ",hc.getPaddedValue());
assertEquals("xyz",hc.getStrippedValue());
assertEquals(3,hc.getCharacterLength());
// Trailing whitespace in the input does not count toward the length.
hc.setValue("abc ",5);
assertEquals("abc ",hc.toString());
assertEquals("abc",hc.getStrippedValue());
assertEquals(3,hc.getCharacterLength());
}
InternalCallVerifier EqualityVerifier
/**
 * setValue(value, maxLength) truncates input longer than maxLength and
 * pads input shorter than maxLength.
 */
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testStringLength() {
  HiveChar charValue = new HiveChar();
  charValue.setValue("0123456789", 5);   // truncated
  assertEquals("01234", charValue.toString());
  charValue.setValue("0123456789", 10);  // exact fit
  assertEquals("0123456789", charValue.toString());
  charValue.setValue("0123456789", 15);  // padded
  assertEquals("0123456789 ", charValue.toString());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * HiveChar equality and ordering: equal content matches, different
 * content differs, trailing padding is ignored, but leading whitespace
 * is significant.
 */
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testComparison(){
HiveChar hc1=new HiveChar();
HiveChar hc2=new HiveChar();
// Identical content and length: equal in both directions.
hc1.setValue("abc",3);
hc2.setValue("abc",3);
assertEquals(hc1,hc2);
assertEquals(hc2,hc1);
assertEquals(0,hc1.compareTo(hc2));
assertEquals(0,hc2.compareTo(hc1));
// Different content: unequal and non-zero comparison.
hc1.setValue("abc",3);
hc1.setValue("123",3);
assertFalse(hc1.equals(hc2));
assertFalse(hc2.equals(hc1));
assertFalse(0 == hc1.compareTo(hc2));
assertFalse(0 == hc2.compareTo(hc1));
// Same content, different declared lengths: padding is ignored for
// equality and ordering even though toString() differs.
hc1.setValue("abc",3);
hc2.setValue("abc",5);
assertEquals("abc",hc1.toString());
assertEquals("abc ",hc2.toString());
assertEquals(hc1,hc2);
assertEquals(hc2,hc1);
assertEquals(0,hc1.compareTo(hc2));
assertEquals(0,hc2.compareTo(hc1));
// Leading whitespace is significant.
hc1.setValue(" abc",4);
hc2.setValue("abc",4);
assertFalse(hc1.equals(hc2));
assertFalse(hc2.equals(hc1));
assertFalse(0 == hc1.compareTo(hc2));
assertFalse(0 == hc2.compareTo(hc1));
}
Class: org.apache.hadoop.hive.common.type.TestHiveDecimal EqualityVerifier NullVerifier HybridVerifier
/**
 * add(): near-max-precision addition yields a non-null result, and
 * trailing zeros are trimmed from an exact sum.
 */
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testPlus() {
  HiveDecimal nearMax = HiveDecimal.create("99999999999999999999999999999999999");
  Assert.assertNotNull(nearMax.add(HiveDecimal.create("1")));
  HiveDecimal lhs = HiveDecimal.create("3.140");
  HiveDecimal rhs = HiveDecimal.create("1.00");
  Assert.assertEquals("4.14", lhs.add(rhs).toString());
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * pow(): a square matches self-multiplication; one tiny-value cube is
 * expected to produce null; an exact square renders trimmed.
 */
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testPow() {
  HiveDecimal base = HiveDecimal.create("3.00001415926");
  Assert.assertEquals(base.pow(2), base.multiply(base));
  HiveDecimal tiny = HiveDecimal.create("0.000017861358882");
  // This case is pinned to return null (the test asserts only the null).
  Assert.assertNull(tiny.pow(3));
  Assert.assertEquals("9.8596", HiveDecimal.create("3.140").pow(2).toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Positive modulus via ((a % b) + b) % b: for -100.91 mod 9.8 the
 * result is 6.89.
 */
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testPosMod() {
  HiveDecimal dividend = HiveDecimal.create("-100.91");
  HiveDecimal modulus = HiveDecimal.create("9.8");
  HiveDecimal posMod = dividend.remainder(modulus).add(modulus).remainder(modulus);
  Assert.assertEquals("6.89", posMod.toString());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Precision/scale enforcement: over-limit values are rounded down to the
 * maximum precision/scale or rejected as null, and enforcePrecisionScale
 * honors explicit limits.
 */
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testPrecisionScaleEnforcement() {
  String decStr = "1786135888657847525803324040144343378.09799306448796128931113691624";
  HiveDecimal dec = HiveDecimal.create(decStr);
  Assert.assertEquals("1786135888657847525803324040144343378.1", dec.toString());
  Assert.assertTrue("Decimal precision should not go above maximum", dec.precision() <= HiveDecimal.MAX_PRECISION);
  Assert.assertTrue("Decimal scale should not go above maximum", dec.scale() <= HiveDecimal.MAX_SCALE);
  decStr = "57847525803324040144343378.09799306448796128931113691624";
  HiveDecimal bd = HiveDecimal.create(decStr);
  // Limits too tight for the value: rejected as null.
  HiveDecimal bd1 = HiveDecimal.enforcePrecisionScale(bd, 20, 5);
  Assert.assertNull(bd1);
  bd1 = HiveDecimal.enforcePrecisionScale(bd, 35, 5);
  Assert.assertEquals("57847525803324040144343378.09799", bd1.toString());
  bd1 = HiveDecimal.enforcePrecisionScale(bd, 45, 20);
  Assert.assertNull(bd1);
  // create(bd, false) is pinned to reject this value as null (the second
  // argument presumably disallows rounding — confirm against HiveDecimal).
  dec = HiveDecimal.create(new BigDecimal(decStr), false);
  Assert.assertNull(dec);
  dec = HiveDecimal.create("-1786135888657847525803324040144343378.09799306448796128931113691624");
  Assert.assertEquals("-1786135888657847525803324040144343378.1", dec.toString());
  dec = HiveDecimal.create("005.34000");
  // Expected value first (the original had expected and actual swapped).
  Assert.assertEquals(3, dec.precision());
  Assert.assertEquals(2, dec.scale());
  dec = HiveDecimal.create("178613588865784752580332404014434337809799306448796128931113691624");
  Assert.assertNull(dec);
  Assert.assertEquals("10", HiveDecimal.enforcePrecisionScale(HiveDecimal.create("9.5"), 2, 0).toString());
  Assert.assertNull(HiveDecimal.enforcePrecisionScale(HiveDecimal.create("9.5"), 1, 0));
  Assert.assertEquals("9", HiveDecimal.enforcePrecisionScale(HiveDecimal.create("9.4"), 1, 0).toString());
}
EqualityVerifier
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testHashCode(){
  // Numerically equal decimals must hash identically regardless of trailing zeros.
  String[][] equalPairs={{"9","9.00"},{"0","0.00"}};
  for (String[] pair : equalPairs) {
    Assert.assertEquals(HiveDecimal.create(pair[0]).hashCode(),HiveDecimal.create(pair[1]).hashCode());
  }
}
EqualityVerifier NullVerifier HybridVerifier
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testDivide(){
  // An inexact quotient is still representable (non-null).
  Assert.assertNotNull(HiveDecimal.create("3.14").divide(HiveDecimal.create("3")));
  // An exact integer quotient renders without a fraction.
  Assert.assertEquals("3",HiveDecimal.create("15").divide(HiveDecimal.create("5")).toString());
  // Trailing zeros are stripped from the quotient.
  Assert.assertEquals("3.14",HiveDecimal.create("3.140").divide(HiveDecimal.create("1.00")).toString());
}
APIUtilityVerifier EqualityVerifier
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testTrailingZeroRemovalAfterEnforcement(){
  // Once enforcement truncates the over-long fraction, the remaining
  // trailing zeros are stripped as well.
  HiveDecimal enforced=HiveDecimal.create("8.090000000000000000000000000000000000000123456");
  Assert.assertEquals("8.09",enforced.toString());
}
EqualityVerifier
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testSubtract(){
  // Subtraction result drops insignificant trailing zeros.
  HiveDecimal minuend=HiveDecimal.create("3.140");
  HiveDecimal subtrahend=HiveDecimal.create("1.00");
  Assert.assertEquals("2.14",minuend.subtract(subtrahend).toString());
}
EqualityVerifier NullVerifier HybridVerifier
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testMultiply(){
  // Products whose precision/scale would exceed the maximum come back null.
  Assert.assertNull(HiveDecimal.create("0.00001786135888657847525803").multiply(HiveDecimal.create("3.0000123456789")));
  Assert.assertNull(HiveDecimal.create("178613588865784752580323232232323444.4").multiply(HiveDecimal.create("178613588865784752580302323232.3")));
  // Representable products are exact.
  Assert.assertEquals("436909.791116",HiveDecimal.create("47.324").multiply(HiveDecimal.create("9232.309")).toString());
  // Trailing zeros are stripped from the product.
  Assert.assertEquals("3.14",HiveDecimal.create("3.140").multiply(HiveDecimal.create("1.00")).toString());
  Assert.assertEquals("86.02",HiveDecimal.create("43.010").multiply(HiveDecimal.create("2")).toString());
}
Class: org.apache.hadoop.hive.common.type.TestHiveIntervalDayTime UtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testValueOf() throws Exception {
// Parses "d hh:mm:ss[.fffffffff]" interval strings and checks each field accessor.
HiveIntervalDayTime i1=HiveIntervalDayTime.valueOf("3 04:05:06.123456");
assertEquals(3,i1.getDays());
assertEquals(4,i1.getHours());
assertEquals(5,i1.getMinutes());
assertEquals(6,i1.getSeconds());
// Fractional seconds are right-padded to nanoseconds: .123456 -> 123456000 ns.
assertEquals(123456000,i1.getNanos());
// An explicit leading '+' is accepted; missing fraction means zero nanos.
HiveIntervalDayTime i2=HiveIntervalDayTime.valueOf("+3 04:05:06");
assertEquals(3,i2.getDays());
assertEquals(4,i2.getHours());
assertEquals(5,i2.getMinutes());
assertEquals(6,i2.getSeconds());
assertEquals(0,i2.getNanos());
// A leading '-' negates every field of the interval, not just the days.
HiveIntervalDayTime i3=HiveIntervalDayTime.valueOf("-12 13:14:15.987654321");
assertEquals(-12,i3.getDays());
assertEquals(-13,i3.getHours());
assertEquals(-14,i3.getMinutes());
assertEquals(-15,i3.getSeconds());
assertEquals(-987654321,i3.getNanos());
// "-0 ..." still negates: only the nanos field is non-zero here.
HiveIntervalDayTime i4=HiveIntervalDayTime.valueOf("-0 0:0:0.000000012");
assertEquals(0,i4.getDays());
assertEquals(0,i4.getHours());
assertEquals(0,i4.getMinutes());
assertEquals(0,i4.getSeconds());
assertEquals(-12,i4.getNanos());
// Invalid inputs: null, garbage, year-month syntax, hours/minutes >= 60.
String[] invalidValues={null,"abc","0-11","0 60:0:0","0 0:60:0"};
for ( String invalidValue : invalidValues) {
boolean caughtException=false;
try {
HiveIntervalDayTime.valueOf(invalidValue);
fail("Expected exception");
}
catch ( IllegalArgumentException err) {
caughtException=true;
}
assertTrue("Expected exception",caughtException);
}
}
InternalCallVerifier EqualityVerifier
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testGetters() throws Exception {
  // Constructor arguments map one-to-one onto the field accessors.
  HiveIntervalDayTime interval=new HiveIntervalDayTime(3,4,5,6,7);
  assertEquals(3,interval.getDays());
  assertEquals(4,interval.getHours());
  assertEquals(5,interval.getMinutes());
  assertEquals(6,interval.getSeconds());
  assertEquals(7,interval.getNanos());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testCompare() throws Exception {
// Verifies the compareTo/equals/hashCode contracts for HiveIntervalDayTime.
// i1 and i2 are equal; i3 is larger; i4 matches i3 except for nanos.
HiveIntervalDayTime i1=new HiveIntervalDayTime(3,4,5,6,7);
HiveIntervalDayTime i2=new HiveIntervalDayTime(3,4,5,6,7);
HiveIntervalDayTime i3=new HiveIntervalDayTime(3,4,8,9,10);
HiveIntervalDayTime i4=new HiveIntervalDayTime(3,4,8,9,5);
// Reflexive and symmetric zero comparisons for equal values.
assertEquals(i1 + " compareTo " + i1,0,i1.compareTo(i1));
assertEquals(i1 + " compareTo " + i2,0,i1.compareTo(i2));
assertEquals(i2 + " compareTo " + i1,0,i2.compareTo(i1));
assertEquals(i3 + " compareTo " + i3,0,i3.compareTo(i3));
// Ordering is antisymmetric for unequal values.
assertTrue(i1 + " compareTo " + i3,0 > i1.compareTo(i3));
assertTrue(i3 + " compareTo " + i1,0 < i3.compareTo(i1));
assertTrue(i1 + " equals " + i1,i1.equals(i1));
assertTrue(i1 + " equals " + i2,i1.equals(i2));
assertFalse(i1 + " equals " + i3,i1.equals(i3));
assertFalse(i3 + " equals " + i1,i3.equals(i1));
assertFalse(i3 + " equals " + i4,i3.equals(i4));
assertEquals(i1 + " hashCode " + i1,i1.hashCode(),i1.hashCode());
// Fix: message previously named i1 twice although this compares i1 with i2.
assertEquals(i1 + " hashCode " + i2,i1.hashCode(),i2.hashCode());
}
InternalCallVerifier EqualityVerifier
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testNormalize() throws Exception {
// Out-of-range constructor fields are carried into the next larger unit:
// 48h -> 2d, 5400s -> 90m, 2000000123ns -> 2s 123ns, giving 52d 1:33:02.000000123.
HiveIntervalDayTime i1=new HiveIntervalDayTime(50,48,3,5400,2000000123);
assertEquals(HiveIntervalDayTime.valueOf("52 1:33:2.000000123"),i1);
assertEquals(52,i1.getDays());
assertEquals(1,i1.getHours());
assertEquals(33,i1.getMinutes());
assertEquals(2,i1.getSeconds());
assertEquals(123,i1.getNanos());
// Mixed-sign fields that cancel out normalize to zero.
assertEquals(HiveIntervalDayTime.valueOf("0 0:0:0"),new HiveIntervalDayTime(0,0,0,0,0));
assertEquals(HiveIntervalDayTime.valueOf("0 0:0:0"),new HiveIntervalDayTime(2,-48,0,1,-1000000000));
assertEquals(HiveIntervalDayTime.valueOf("0 0:0:0"),new HiveIntervalDayTime(-2,48,0,-1,1000000000));
// Partial cancellation leaves the signed remainder.
assertEquals(HiveIntervalDayTime.valueOf("1 0:0:0"),new HiveIntervalDayTime(-1,48,0,0,0));
assertEquals(HiveIntervalDayTime.valueOf("-1 0:0:0"),new HiveIntervalDayTime(1,-48,0,0,0));
// Borrowing across units: 1 day minus 1 nanosecond.
assertEquals(HiveIntervalDayTime.valueOf("0 23:59:59.999999999"),new HiveIntervalDayTime(1,0,0,0,-1));
assertEquals(HiveIntervalDayTime.valueOf("-0 23:59:59.999999999"),new HiveIntervalDayTime(-1,0,0,0,1));
// -1 day + 172800 seconds (2 days) nets to +1 day.
assertEquals(HiveIntervalDayTime.valueOf("1 10:11:0"),new HiveIntervalDayTime(-1,10,11,172800,0));
// Large carries: 480h -> 20d, totalling 500 days.
i1=new HiveIntervalDayTime(480,480,0,5400,2000000123);
assertEquals(500,i1.getDays());
assertEquals(1,i1.getHours());
assertEquals(30,i1.getMinutes());
assertEquals(2,i1.getSeconds());
assertEquals(123,i1.getNanos());
}
EqualityVerifier
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testToString() throws Exception {
  // valueOf/toString round trip normalizes to "d hh:mm:ss.nnnnnnnnn"
  // with zero-padded fields and nine fraction digits.
  String[][] roundTrips={
    {"0 00:00:00.000000000","0 00:00:00"},
    {"3 04:05:06.123456000","3 04:05:06.123456"},
    {"-3 04:05:06.123456000","-3 04:05:06.123456"},
    {"1 00:00:00.000000000","1 00:00:00"},
    {"-1 00:00:00.000000000","-1 00:00:00"},
    {"0 00:00:00.880000000","0 00:00:00.88"},
    {"-0 00:00:00.880000000","-0 00:00:00.88"}
  };
  for (String[] rt : roundTrips) {
    assertEquals(rt[0],HiveIntervalDayTime.valueOf(rt[1]).toString());
  }
  // Constructor fields of uniform sign render with a single leading sign.
  assertEquals("-3 04:05:06.000000007",new HiveIntervalDayTime(-3,-4,-5,-6,-7).toString());
  assertEquals("3 04:05:06.000000007",new HiveIntervalDayTime(3,4,5,6,7).toString());
}
Class: org.apache.hadoop.hive.common.type.TestHiveIntervalYearMonth InternalCallVerifier EqualityVerifier
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testNormalize() throws Exception {
// Mixed-sign year/month fields normalize to a single signed total:
// 1 year - 6 months = 6 months.
HiveIntervalYearMonth i1=new HiveIntervalYearMonth(1,-6);
assertEquals(HiveIntervalYearMonth.valueOf("0-6"),i1);
assertEquals(0,i1.getYears());
assertEquals(6,i1.getMonths());
assertEquals(HiveIntervalYearMonth.valueOf("0-0"),new HiveIntervalYearMonth(0,0));
// -1 year + 12 months cancels to zero; overflow months carry into years.
assertEquals(HiveIntervalYearMonth.valueOf("0-0"),new HiveIntervalYearMonth(-1,12));
assertEquals(HiveIntervalYearMonth.valueOf("0-4"),new HiveIntervalYearMonth(-1,16));
// Borrowing: 1 year - 1 month = 11 months; sign follows the net total.
assertEquals(HiveIntervalYearMonth.valueOf("0-11"),new HiveIntervalYearMonth(1,-1));
assertEquals(HiveIntervalYearMonth.valueOf("-0-11"),new HiveIntervalYearMonth(-1,1));
// -5 years + 121 months = -60 + 121 = 61 months = 5 years 1 month.
assertEquals(HiveIntervalYearMonth.valueOf("5-1"),new HiveIntervalYearMonth(-5,121));
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testCompare() throws Exception {
// Verifies the compareTo/equals/hashCode contracts for HiveIntervalYearMonth.
// i1 and i2 are equal; i3 is one month larger.
HiveIntervalYearMonth i1=new HiveIntervalYearMonth(1,2);
HiveIntervalYearMonth i2=new HiveIntervalYearMonth(1,2);
HiveIntervalYearMonth i3=new HiveIntervalYearMonth(1,3);
// Reflexive and symmetric zero comparisons for equal values.
assertEquals(i1 + " compareTo " + i1,0,i1.compareTo(i1));
assertEquals(i1 + " compareTo " + i2,0,i1.compareTo(i2));
assertEquals(i2 + " compareTo " + i1,0,i2.compareTo(i1));
assertEquals(i3 + " compareTo " + i3,0,i3.compareTo(i3));
// Ordering is antisymmetric for unequal values.
assertTrue(i1 + " compareTo " + i3,0 > i1.compareTo(i3));
assertTrue(i3 + " compareTo " + i1,0 < i3.compareTo(i1));
assertTrue(i1 + " equals " + i1,i1.equals(i1));
assertTrue(i1 + " equals " + i2,i1.equals(i2));
assertFalse(i1 + " equals " + i3,i1.equals(i3));
assertFalse(i3 + " equals " + i1,i3.equals(i1));
assertEquals(i1 + " hashCode " + i1,i1.hashCode(),i1.hashCode());
// Fix: message previously named i1 twice although this compares i1 with i2.
assertEquals(i1 + " hashCode " + i2,i1.hashCode(),i2.hashCode());
}
EqualityVerifier
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testToString() throws Exception {
  // valueOf/toString round trip; "-0-0" normalizes to "0-0".
  String[][] roundTrips={
    {"0-0","0-0"},
    {"1-2","1-2"},
    {"-1-2","-1-2"},
    {"1-0","1-0"},
    {"-1-0","-1-0"},
    {"0-0","-0-0"}
  };
  for (String[] rt : roundTrips) {
    assertEquals(rt[0],HiveIntervalYearMonth.valueOf(rt[1]).toString());
  }
}
UtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testValueOf() throws Exception {
// Parses "y-m" interval strings and checks the field accessors.
HiveIntervalYearMonth i1=HiveIntervalYearMonth.valueOf("1-2");
assertEquals(1,i1.getYears());
assertEquals(2,i1.getMonths());
// An explicit leading '+' is accepted.
HiveIntervalYearMonth i2=HiveIntervalYearMonth.valueOf("+8-9");
assertEquals(8,i2.getYears());
assertEquals(9,i2.getMonths());
// A leading '-' negates both fields.
HiveIntervalYearMonth i3=HiveIntervalYearMonth.valueOf("-10-11");
assertEquals(-10,i3.getYears());
assertEquals(-11,i3.getMonths());
HiveIntervalYearMonth i4=HiveIntervalYearMonth.valueOf("-0-0");
assertEquals(0,i4.getYears());
assertEquals(0,i4.getMonths());
// Invalid inputs: null, garbage, months >= 12, day-time syntax.
String[] invalidValues={null,"abc","0-12","0 1:2:3"};
for ( String invalidValue : invalidValues) {
boolean caughtException=false;
try {
HiveIntervalYearMonth.valueOf(invalidValue);
fail("Expected exception");
}
catch ( IllegalArgumentException err) {
caughtException=true;
}
assertTrue("Expected exception",caughtException);
}
}
InternalCallVerifier EqualityVerifier
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testGetters() throws Exception {
  // Constructor arguments map one-to-one onto the field accessors.
  HiveIntervalYearMonth interval=new HiveIntervalYearMonth(1,2);
  assertEquals(1,interval.getYears());
  assertEquals(2,interval.getMonths());
}
Class: org.apache.hadoop.hive.common.type.TestHiveVarchar BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testComparison() throws Exception {
  HiveVarchar base=new HiveVarchar("abcd",20);
  HiveVarchar same=new HiveVarchar("abcd",20);
  // Identical values: symmetric equals and zero compareTo in both directions.
  assertTrue(base.equals(same));
  assertTrue(same.equals(base));
  assertEquals(0,base.compareTo(same));
  assertEquals(0,same.compareTo(base));
  // Differing values — longer string, trailing space (different max length),
  // leading space — are never equal and never compare as zero, either way.
  HiveVarchar[] different={
    new HiveVarchar("abcde",20),
    new HiveVarchar("abcd ",30),
    new HiveVarchar(" abcd",20)
  };
  for (HiveVarchar other : different) {
    assertFalse(base.equals(other));
    assertFalse(other.equals(base));
    assertFalse(0 == base.compareTo(other));
    assertFalse(0 == other.compareTo(base));
  }
}
APIUtilityVerifier IterativeVerifier BranchVerifier InternalCallVerifier EqualityVerifier
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testStringLength() throws Exception {
// Verifies HiveBaseChar.enforceMaxLength (truncate to a code-point limit)
// and HiveVarchar.getCharacterLength / setValue.
int strLen=20;
// Random strings shorter than, equal to, and longer than the limit.
int[] lengths={15,20,25};
for (int idx1=0; idx1 < lengths.length; ++idx1) {
StringBuilder sb=new StringBuilder();
int curLen=lengths[idx1];
for (int idx2=0; idx2 < curLen; ++idx2) {
// getRandomCodePoint is a helper defined elsewhere in this test class;
// presumably it returns a printable code point at or above ' ' — TODO confirm.
sb.appendCodePoint(getRandomCodePoint(' '));
}
String testString=sb.toString();
assertEquals(curLen,testString.codePointCount(0,testString.length()));
String enforcedString=HiveBaseChar.enforceMaxLength(testString,strLen);
if (curLen <= strLen) {
// Within the limit the string is returned unchanged.
assertEquals(testString,enforcedString);
}
else {
// Beyond the limit the string is cut to exactly strLen code points.
assertEquals(strLen,enforcedString.codePointCount(0,enforcedString.length()));
}
}
// Same checks with fixed ASCII strings of lengths 7, 20, and 26.
String[] testStrings={"abcdefg","abcdefghijklmnopqrst","abcdefghijklmnopqrstuvwxyz"};
for ( String testString : testStrings) {
int curLen=testString.length();
assertEquals(curLen,testString.codePointCount(0,testString.length()));
String enforcedString=HiveBaseChar.enforceMaxLength(testString,strLen);
if (curLen <= strLen) {
assertEquals(testString,enforcedString);
}
else {
assertEquals(strLen,enforcedString.codePointCount(0,enforcedString.length()));
}
}
// setValue variants: a maxLength of -1 appears to mean "no limit" — TODO confirm.
HiveVarchar vc1=new HiveVarchar("0123456789",10);
assertEquals(10,vc1.getCharacterLength());
vc1.setValue("012345678901234");
assertEquals(15,vc1.getCharacterLength());
vc1.setValue("01234",-1);
assertEquals(5,vc1.getCharacterLength());
vc1.setValue(new HiveVarchar("0123456789",-1));
assertEquals(10,vc1.getCharacterLength());
vc1.setValue(new HiveVarchar("01234",-1),-1);
assertEquals(5,vc1.getCharacterLength());
}
Class: org.apache.hadoop.hive.common.type.TestSignedInt128 EqualityVerifier
@Test public void testSignedInt128SignedInt128(){
  // The copy constructor preserves the numeric value of the source.
  SignedInt128 copyOfOne=new SignedInt128(one);
  SignedInt128 copyOfTwo=new SignedInt128(two);
  assertEquals(1L,copyOfOne.longValue());
  assertEquals(2L,copyOfTwo.longValue());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test public void testZeroClear(){
// zeroClear() mutates only the receiver; `one` and `two` are shared
// fixtures, so the assertion order here is significant.
assertFalse(one.isZero());
assertFalse(two.isZero());
assertTrue(0L != one.longValue());
assertTrue(0L != two.longValue());
// Clearing `two` must not affect `one`.
two.zeroClear();
assertTrue(0L != one.longValue());
assertEquals(0L,two.longValue());
assertFalse(one.isZero());
assertTrue(two.isZero());
// Now clear `one` as well; both are zero.
one.zeroClear();
assertEquals(0L,one.longValue());
assertEquals(0L,two.longValue());
assertTrue(one.isZero());
assertTrue(two.isZero());
}
InternalCallVerifier EqualityVerifier
@Test public void testShiftDestructive(){
// Exercises in-place left/right shifts across the four 32-bit words
// (v0 = least significant word).
SignedInt128 big=new SignedInt128((1L << 62) + (23L << 32) + 89L);
// << 2 multiplies each word by 4; bit 62 carries into word v2.
big.shiftLeftDestructive(2);
assertEquals(89 * 4,big.getV0());
assertEquals(23 * 4,big.getV1());
assertEquals(1,big.getV2());
assertEquals(0,big.getV3());
// << 32 moves every word up one position.
big.shiftLeftDestructive(32);
assertEquals(0,big.getV0());
assertEquals(89 * 4,big.getV1());
assertEquals(23 * 4,big.getV2());
assertEquals(1,big.getV3());
// >> 2: the low bit of v3 shifts down into the top of v2.
big.shiftRightDestructive(2,true);
assertEquals(0,big.getV0());
assertEquals(89,big.getV1());
assertEquals(23 + (1 << 30),big.getV2());
assertEquals(0,big.getV3());
// >> 32 moves every word down one position.
big.shiftRightDestructive(32,true);
assertEquals(89,big.getV0());
assertEquals(23 + (1 << 30),big.getV1());
assertEquals(0,big.getV2());
assertEquals(0,big.getV3());
// The boolean flag appears to control rounding of shifted-out bits:
// 17>>1 gives 9 with true (round up) but truncates with false — TODO confirm.
SignedInt128 tmp=new SignedInt128(17);
assertEquals(17,tmp.getV0());
tmp.shiftRightDestructive(1,true);
assertEquals(9,tmp.getV0());
tmp.shiftRightDestructive(1,false);
assertEquals(4,tmp.getV0());
tmp.shiftRightDestructive(1,true);
assertEquals(2,tmp.getV0());
tmp.shiftRightDestructive(1,true);
assertEquals(1,tmp.getV0());
tmp.shiftRightDestructive(1,true);
assertEquals(1,tmp.getV0());
tmp.shiftRightDestructive(1,false);
assertEquals(0,tmp.getV0());
}
InternalCallVerifier EqualityVerifier
@Test public void testSubtractDestructive(){
// subtractDestructive mutates the receiver and leaves the argument untouched.
two.subtractDestructive(one);
assertEquals(1L,one.longValue());
// Fix: this assertion previously re-checked `one` (a copy-paste duplicate);
// it now verifies that the receiver `two` holds the difference 2 - 1 = 1.
assertEquals(1L,two.longValue());
// Subtraction may go negative for signed values.
one.subtractDestructive(new SignedInt128(10L));
assertEquals(-9L,one.longValue());
// Multi-word borrow: subtract tmp twice from big and check each word.
SignedInt128 big=new SignedInt128((1L << 62) + (3L << 34) + 3L);
big.shiftLeftDestructive(6);
SignedInt128 tmp=new SignedInt128((1L << 61) + 5L);
tmp.shiftLeftDestructive(6);
big.subtractDestructive(tmp);
big.subtractDestructive(tmp);
assertEquals((3 << 6) - 2 * (5 << 6),big.getV0());
assertEquals((3 << 8) - 1,big.getV1());
assertEquals(0,big.getV2());
assertEquals(0,big.getV3());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test public void testHashCode(){
// Distinct small fixture values are expected to produce distinct hashes.
// NOTE(review): hash inequality is not guaranteed by the hashCode contract
// in general; this relies on the implementation's behavior for small values.
assertTrue(one.hashCode() != two.hashCode());
assertTrue(zero.hashCode() != one.hashCode());
assertTrue(zero.hashCode() != two.hashCode());
assertTrue(one.hashCode() != negativeOne.hashCode());
assertTrue(two.hashCode() != negativeTwo.hashCode());
// Equal values must hash equal; -0 and 0 are the same int literal.
assertEquals(zero.hashCode(),new SignedInt128(-0).hashCode());
assertEquals(zero.hashCode(),new SignedInt128(0).hashCode());
assertEquals(one.hashCode(),new SignedInt128(1).hashCode());
assertEquals(two.hashCode(),new SignedInt128(2).hashCode());
}
APIUtilityVerifier UtilityVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test public void testDivideDestructiveInt(){
// divideDestructive(int) mutates the receiver in place.
two.divideDestructive(1);
assertEquals(1L,one.longValue());
assertEquals(2L,two.longValue());
// Integer division truncates: 1 / 2 = 0.
one.divideDestructive(2);
assertEquals(0L,one.longValue());
assertEquals(2L,two.longValue());
SignedInt128 var1=new SignedInt128(1234234662345L);
var1.divideDestructive(642337);
assertEquals(1234234662345L / 642337L,var1.longValue());
// Cross-check a multi-word dividend against BigInteger.
SignedInt128 complicated1=new SignedInt128(0xF9892FCA,0x59D109AD,0x0534AB4C,0);
BigInteger bigInteger1=complicated1.toBigIntegerSlow();
complicated1.divideDestructive(1534223465);
BigInteger bigInteger2=BigInteger.valueOf(1534223465);
BigInteger ans=bigInteger1.divide(bigInteger2);
assertEquals(ans,complicated1.toBigIntegerSlow());
// Division by zero must raise ArithmeticException.
try {
complicated1.divideDestructive(0);
fail();
}
catch ( ArithmeticException ex) {
}
}
APIUtilityVerifier UtilityVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test public void testDivideDestructiveSignedInt128(){
// divideDestructive(SignedInt128, SignedInt128) mutates the receiver to the
// quotient and writes the remainder into the second argument.
SignedInt128 remainder=new SignedInt128();
two.divideDestructive(one,remainder);
assertEquals(1L,one.longValue());
assertEquals(2L,two.longValue());
assertEquals(zero,remainder);
// 1 / 2 = 0 remainder 1.
one.divideDestructive(two,remainder);
assertEquals(0L,one.longValue());
assertEquals(2L,two.longValue());
assertEquals(new SignedInt128(1),remainder);
SignedInt128 var1=new SignedInt128(1234234662345L);
var1.divideDestructive(new SignedInt128(642337),remainder);
assertEquals(1234234662345L / 642337L,var1.longValue());
assertEquals(1234234662345L % 642337L,remainder.longValue());
// Cross-check a full 128-bit dividend against BigInteger.
SignedInt128 complicated1=new SignedInt128(0xF9892FCA,0x59D109AD,0x0534AB4C,0x42395ADC);
SignedInt128 complicated2=new SignedInt128(0xF09DC19A,0x00001234,0,0);
BigInteger bigInteger1=complicated1.toBigIntegerSlow();
BigInteger bigInteger2=complicated2.toBigIntegerSlow();
complicated1.divideDestructive(complicated2,remainder);
BigInteger ans=bigInteger1.divide(bigInteger2);
assertEquals(ans,complicated1.toBigIntegerSlow());
// Division by zero must raise ArithmeticException.
try {
complicated1.divideDestructive(zero,remainder);
fail();
}
catch ( ArithmeticException ex) {
}
}
UtilityVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test public void testMultiplyDestructiveInt(){
// multiplyDestructive(int) mutates the receiver; `one`/`two` are shared fixtures.
two.multiplyDestructive(1);
assertEquals(2L,two.longValue());
assertEquals(1L,one.longValue());
two.multiplyDestructive(2);
assertEquals(4L,two.longValue());
// Single-word product stays in v0.
SignedInt128 five=new SignedInt128(5);
five.multiplyDestructive(6432346);
assertEquals(6432346 * 5,five.getV0());
assertEquals(0,five.getV1());
assertEquals(0,five.getV2());
assertEquals(0,five.getV3());
// Multi-word product: (2^62 + 3*2^34 + 3) * 96 spreads across v0..v2.
SignedInt128 big=new SignedInt128((1L << 62) + (3L << 34) + 3L);
big.multiplyDestructive(96);
assertEquals(3 * 96,big.getV0());
assertEquals(96 * (3 << 2),big.getV1());
assertEquals(96 / 4,big.getV2());
assertEquals(0,big.getV3());
// 2^126 * 2 overflows the signed 128-bit range and must throw.
SignedInt128 tmp=new SignedInt128(1);
tmp.shiftLeftDestructive(126);
try {
tmp.multiplyDestructive(2);
fail();
}
catch ( ArithmeticException ex) {
}
}
EqualityVerifier
@Test public void testSignedInt128IntIntIntInt(){
  // Words are little-endian: v1 occupies bits 32-63 and v0 bits 0-31.
  long expected=(((long)11) << 32L) | 23L;
  assertEquals(expected,new SignedInt128(23,11,0,0).longValue());
}
APIUtilityVerifier UtilityVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test public void testMultiplyDestructiveSignedInt128(){
// multiplyDestructive(SignedInt128) mutates the receiver; squaring via
// multiplying a value by itself is also exercised.
two.multiplyDestructive(one);
assertEquals(2L,two.longValue());
assertEquals(1L,one.longValue());
two.multiplyDestructive(two);
assertEquals(4L,two.longValue());
// Single-word product stays in v0.
SignedInt128 five=new SignedInt128(5);
five.multiplyDestructive(new SignedInt128(6432346));
assertEquals(6432346 * 5,five.getV0());
assertEquals(0,five.getV1());
assertEquals(0,five.getV2());
assertEquals(0,five.getV3());
// Multi-word product spreads across v0..v2.
SignedInt128 big=new SignedInt128((1L << 62) + (3L << 34) + 3L);
big.multiplyDestructive(new SignedInt128(96));
assertEquals(3 * 96,big.getV0());
assertEquals(96 * (3 << 2),big.getV1());
assertEquals(96 / 4,big.getV2());
assertEquals(0,big.getV3());
// 2^126 * 2 overflows the signed 128-bit range and must throw.
SignedInt128 tmp=new SignedInt128(1);
tmp.shiftLeftDestructive(126);
try {
tmp.multiplyDestructive(new SignedInt128(2));
fail();
}
catch ( ArithmeticException ex) {
}
// Cross-check a large in-range product against BigInteger.
SignedInt128 complicated1=new SignedInt128(0xF9892FCA,0x59D109AD,0x0534AB4C,0);
BigInteger bigInteger1=complicated1.toBigIntegerSlow();
SignedInt128 complicated2=new SignedInt128(54234234,9,0,0);
BigInteger bigInteger2=complicated2.toBigIntegerSlow();
complicated1.multiplyDestructive(complicated2);
BigInteger ans=bigInteger1.multiply(bigInteger2);
assertEquals(ans,complicated1.toBigIntegerSlow());
// A larger second factor pushes the product out of range and must throw.
try {
SignedInt128 complicated3=new SignedInt128(0xF9892FCA,0x59D109AD,0x0534AB4C,0);
complicated3.multiplyDestructive(new SignedInt128(54234234,9845,0,0));
fail();
}
catch ( ArithmeticException ex) {
}
}
EqualityVerifier
@Test public void testSignedInt128(){
  // The no-argument constructor yields zero.
  SignedInt128 defaultValue=new SignedInt128();
  assertEquals(0L,defaultValue.longValue());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void testDivideDestructiveSignedInt128Again(){
  // Cross-checks a two-word division against BigInteger; snapshots are taken
  // before the destructive call mutates the dividend into the quotient.
  SignedInt128 dividend=new SignedInt128(0xF9892FCA,0x59D109AD,0,0);
  SignedInt128 divisor=new SignedInt128(0xF09DC19A,3,0,0);
  BigInteger dividendSnapshot=dividend.toBigIntegerSlow();
  BigInteger divisorSnapshot=divisor.toBigIntegerSlow();
  dividend.divideDestructive(divisor,new SignedInt128());
  BigInteger expectedQuotient=dividendSnapshot.divide(divisorSnapshot);
  assertEquals(expectedQuotient,dividend.toBigIntegerSlow());
}
UtilityVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test public void testAddDestructive(){
// addDestructive mutates the receiver and leaves the argument untouched.
one.addDestructive(two);
assertEquals(3L,one.longValue());
assertEquals(2L,two.longValue());
// Accumulate big 54 times and verify each word, including the carry into
// v1's sign bit and into v2 (54 * 2^62 = 13 * 2^64 + 2^63).
SignedInt128 big=new SignedInt128((1L << 62) + 3L);
SignedInt128 tmp=new SignedInt128(0L);
for (int i=0; i < 54; ++i) {
tmp.addDestructive(big);
}
assertEquals(3 * 54,tmp.getV0());
assertEquals(0x80000000,tmp.getV1());
assertEquals(13,tmp.getV2());
assertEquals(0,tmp.getV3());
// The addend itself is unchanged.
assertEquals((1L << 62) + 3L,big.longValue());
// 2^125 + 2^125 = 2^126 is still in range; note `one` is 3 at this point,
// so huge is actually 3 * 2^125 — adding them overflows and must throw.
SignedInt128 huge=new SignedInt128(one);
huge.shiftLeftDestructive(125);
SignedInt128 huge2=new SignedInt128(one);
huge2.shiftLeftDestructive(125);
try {
huge2.addDestructive(huge);
fail();
}
catch ( ArithmeticException ex) {
}
}
InternalCallVerifier EqualityVerifier
@Test public void testToFormalString(){
// toFormalString renders the signed decimal representation.
assertEquals("0",zero.toFormalString());
assertEquals("1",one.toFormalString());
assertEquals("-1",negativeOne.toFormalString());
assertEquals("-2",negativeTwo.toFormalString());
assertEquals("30",new SignedInt128(30).toFormalString());
assertEquals("680000000000",new SignedInt128(680000000000L).toFormalString());
assertEquals("6800000000000",new SignedInt128(6800000000000L).toFormalString());
assertEquals("68",new SignedInt128(68).toFormalString());
assertEquals("-30",new SignedInt128(-30).toFormalString());
assertEquals("-680000000000",new SignedInt128(-680000000000L).toFormalString());
assertEquals("-6800000000000",new SignedInt128(-6800000000000L).toFormalString());
assertEquals("-68",new SignedInt128(-68).toFormalString());
// The String constructor parses the same representation back;
// "-0" parses to zero.
assertEquals(zero,new SignedInt128("0"));
assertEquals(one,new SignedInt128("1"));
assertEquals(zero,new SignedInt128("-0"));
assertEquals(negativeOne,new SignedInt128("-1"));
assertEquals(negativeTwo,new SignedInt128("-2"));
assertEquals(new SignedInt128(30),new SignedInt128("30"));
assertEquals(new SignedInt128(680000000000L),new SignedInt128("680000000000"));
assertEquals(new SignedInt128(6800000000000L),new SignedInt128("6800000000000"));
assertEquals(new SignedInt128(68),new SignedInt128("68"));
assertEquals(new SignedInt128(-30),new SignedInt128("-30"));
assertEquals(new SignedInt128(-680000000000L),new SignedInt128("-680000000000"));
assertEquals(new SignedInt128(-6800000000000L),new SignedInt128("-6800000000000"));
assertEquals(new SignedInt128(-68),new SignedInt128("-68"));
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test public void testEquals(){
  // Distinct fixture values are unequal.
  assertFalse(one.equals(two));
  assertFalse(zero.equals(one));
  assertFalse(zero.equals(two));
  // Freshly constructed values equal the fixtures with the same value.
  assertEquals(zero,new SignedInt128(0));
  assertEquals(one,new SignedInt128(1));
  assertEquals(two,new SignedInt128(2));
  // Sign matters for equality.
  assertFalse(one.equals(negativeOne));
  assertFalse(two.equals(negativeTwo));
  // -0 is the same int literal as 0.
  assertEquals(zero,new SignedInt128(-0));
}
Class: org.apache.hadoop.hive.common.type.TestSqlMathUtil APIUtilityVerifier EqualityVerifier
@Test public void testDivision(){
// Exercises SqlMathUtil.divideMultiPrecision on arrays of 32-bit words.
// The word order appears to be little-endian (index 0 = least significant);
// the quotient array is caller-allocated and the remainder is returned — TODO confirm.
{
int[] dividend=new int[]{1 + 33,2 + 21,3,4 + 10,20,30,40,0};
int[] divisor=new int[]{1,2,3,4};
int[] quotient=new int[5];
int[] remainder=SqlMathUtil.divideMultiPrecision(dividend,divisor,quotient);
assertArrayEquals(new int[]{1,0,0,10,0},quotient);
assertArrayEquals(new int[]{33,21,0,0,0,0,0,0,0},remainder);
}
{
// Divisor is exactly dividend >> 16, so the quotient is 0x10000 with no remainder.
int[] dividend=new int[]{0xF7000000,0,0x39000000,0};
int[] divisor=new int[]{0xF700,0,0x3900,0};
int[] quotient=new int[5];
int[] remainder=SqlMathUtil.divideMultiPrecision(dividend,divisor,quotient);
assertArrayEquals(new int[]{0x10000,0,0,0,0},quotient);
assertArrayEquals(new int[]{0,0,0,0,0},remainder);
}
{
// Zero dividend: quotient and remainder are both zero.
int[] dividend=new int[]{0,0,0,0};
int[] divisor=new int[]{0xF700,0,0x3900,0};
int[] quotient=new int[5];
int[] remainder=SqlMathUtil.divideMultiPrecision(dividend,divisor,quotient);
assertArrayEquals(new int[]{0,0,0,0,0},quotient);
assertArrayEquals(new int[]{0,0,0,0,0},remainder);
}
}
Class: org.apache.hadoop.hive.common.type.TestUnsignedInt128 InternalCallVerifier EqualityVerifier
@Test public void testToFormalString(){
  // toFormalString renders the decimal representation of the fixtures.
  assertEquals("0",zero.toFormalString());
  assertEquals("1",one.toFormalString());
  long[] samples={30L,680000000000L,6800000000000L,68L};
  for (long sample : samples) {
    assertEquals(Long.toString(sample),new UnsignedInt128(sample).toFormalString());
  }
  // The String constructor parses the same representation back.
  assertEquals(zero,new UnsignedInt128("0"));
  assertEquals(one,new UnsignedInt128("1"));
  for (long sample : samples) {
    assertEquals(new UnsignedInt128(sample),new UnsignedInt128(Long.toString(sample)));
  }
}
InternalCallVerifier EqualityVerifier
@Test public void testShiftDestructive(){
// Exercises in-place left/right shifts across the four 32-bit words
// (v0 = least significant word).
UnsignedInt128 big=new UnsignedInt128((1L << 62) + (23L << 32) + 89L);
// << 2 multiplies each word by 4; bit 62 carries into word v2.
big.shiftLeftDestructive(2);
assertEquals(89 * 4,big.getV0());
assertEquals(23 * 4,big.getV1());
assertEquals(1,big.getV2());
assertEquals(0,big.getV3());
// << 32 moves every word up one position.
big.shiftLeftDestructive(32);
assertEquals(0,big.getV0());
assertEquals(89 * 4,big.getV1());
assertEquals(23 * 4,big.getV2());
assertEquals(1,big.getV3());
// >> 2: the low bit of v3 shifts down into the top of v2.
big.shiftRightDestructive(2,true);
assertEquals(0,big.getV0());
assertEquals(89,big.getV1());
assertEquals(23 + (1 << 30),big.getV2());
assertEquals(0,big.getV3());
// >> 32 moves every word down one position.
big.shiftRightDestructive(32,true);
assertEquals(89,big.getV0());
assertEquals(23 + (1 << 30),big.getV1());
assertEquals(0,big.getV2());
assertEquals(0,big.getV3());
// The boolean flag appears to control rounding of shifted-out bits:
// 17>>1 gives 9 with true (round up) but truncates with false — TODO confirm.
UnsignedInt128 tmp=new UnsignedInt128(17);
assertEquals(17,tmp.getV0());
tmp.shiftRightDestructive(1,true);
assertEquals(9,tmp.getV0());
tmp.shiftRightDestructive(1,false);
assertEquals(4,tmp.getV0());
tmp.shiftRightDestructive(1,true);
assertEquals(2,tmp.getV0());
tmp.shiftRightDestructive(1,true);
assertEquals(1,tmp.getV0());
tmp.shiftRightDestructive(1,true);
assertEquals(1,tmp.getV0());
tmp.shiftRightDestructive(1,false);
assertEquals(0,tmp.getV0());
}
APIUtilityVerifier UtilityVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test public void testMultiplyDestructiveUnsignedInt128(){
// multiplyDestructive(UnsignedInt128) mutates the receiver; squaring via
// multiplying a value by itself is also exercised.
two.multiplyDestructive(one);
assertEquals(2L,two.asLong());
assertEquals(1L,one.asLong());
two.multiplyDestructive(two);
assertEquals(4L,two.asLong());
// Single-word product stays in v0.
UnsignedInt128 five=new UnsignedInt128(5);
five.multiplyDestructive(new UnsignedInt128(6432346));
assertEquals(6432346 * 5,five.getV0());
assertEquals(0,five.getV1());
assertEquals(0,five.getV2());
assertEquals(0,five.getV3());
// Multi-word product spreads across v0..v2.
UnsignedInt128 big=new UnsignedInt128((1L << 62) + (3L << 34) + 3L);
big.multiplyDestructive(new UnsignedInt128(96));
assertEquals(3 * 96,big.getV0());
assertEquals(96 * (3 << 2),big.getV1());
assertEquals(96 / 4,big.getV2());
assertEquals(0,big.getV3());
// 2^126 * 2 = 2^127 still fits the unsigned range; doubling once more
// would be 2^128 and must throw.
UnsignedInt128 tmp=new UnsignedInt128(1);
tmp.shiftLeftDestructive(126);
tmp.multiplyDestructive(new UnsignedInt128(2));
try {
tmp.multiplyDestructive(new UnsignedInt128(2));
fail();
}
catch ( ArithmeticException ex) {
}
// Cross-check a large in-range product against BigInteger.
UnsignedInt128 complicated1=new UnsignedInt128(0xF9892FCA,0x59D109AD,0x0534AB4C,0);
BigInteger bigInteger1=complicated1.toBigIntegerSlow();
UnsignedInt128 complicated2=new UnsignedInt128(54234234,9,0,0);
BigInteger bigInteger2=complicated2.toBigIntegerSlow();
complicated1.multiplyDestructive(complicated2);
BigInteger ans=bigInteger1.multiply(bigInteger2);
assertEquals(ans,complicated1.toBigIntegerSlow());
// A larger second factor pushes the product out of range and must throw.
try {
UnsignedInt128 complicated3=new UnsignedInt128(0xF9892FCA,0x59D109AD,0x0534AB4C,0);
complicated3.multiplyDestructive(new UnsignedInt128(54234234,9845,0,0));
fail();
}
catch ( ArithmeticException ex) {
}
}
EqualityVerifier
@Test public void testUnsignedInt128UnsignedInt128(){
  // The copy constructor preserves the numeric value of the source.
  UnsignedInt128 copyOfOne=new UnsignedInt128(one);
  UnsignedInt128 copyOfTwo=new UnsignedInt128(two);
  assertEquals(1L,copyOfOne.asLong());
  assertEquals(2L,copyOfTwo.asLong());
}
UtilityVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test public void testSubtractDestructive(){
// subtractDestructive mutates the receiver and leaves the argument untouched.
two.subtractDestructive(one);
assertEquals(1L,one.asLong());
// Fix: this assertion previously re-checked `one` (a copy-paste duplicate);
// it now verifies that the receiver `two` holds the difference 2 - 1 = 1.
assertEquals(1L,two.asLong());
// An unsigned value cannot go negative: 1 - 10 must throw.
try {
one.subtractDestructive(new UnsignedInt128(10L));
fail();
}
catch ( ArithmeticException ex) {
}
// Multi-word borrow: subtract tmp twice from big and check each word.
UnsignedInt128 big=new UnsignedInt128((1L << 62) + (3L << 34) + 3L);
big.shiftLeftDestructive(6);
UnsignedInt128 tmp=new UnsignedInt128((1L << 61) + 5L);
tmp.shiftLeftDestructive(6);
big.subtractDestructive(tmp);
big.subtractDestructive(tmp);
assertEquals((3 << 6) - 2 * (5 << 6),big.getV0());
assertEquals((3 << 8) - 1,big.getV1());
assertEquals(0,big.getV2());
assertEquals(0,big.getV3());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void testDivideDestructiveUnsignedInt128Again(){
  // Cross-checks a two-word division against BigInteger; snapshots are taken
  // before the destructive call mutates the dividend into the quotient.
  UnsignedInt128 dividend=new UnsignedInt128(0xF9892FCA,0x59D109AD,0,0);
  UnsignedInt128 divisor=new UnsignedInt128(0xF09DC19A,3,0,0);
  BigInteger dividendSnapshot=dividend.toBigIntegerSlow();
  BigInteger divisorSnapshot=divisor.toBigIntegerSlow();
  dividend.divideDestructive(divisor,new UnsignedInt128());
  BigInteger expectedQuotient=dividendSnapshot.divide(divisorSnapshot);
  assertEquals(expectedQuotient,dividend.toBigIntegerSlow());
}
APIUtilityVerifier UtilityVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Exercises in-place 128-bit division with a remainder output: division by one,
 * division producing a zero quotient, a long-sized case checked against Java's
 * long arithmetic, a full multi-word case checked against BigInteger, and
 * division by zero (must throw ArithmeticException).
 */
@Test public void testDivideDestructiveUnsignedInt128(){
UnsignedInt128 remainder=new UnsignedInt128();
// 2 / 1 = 2 remainder 0; the divisor `one` must be left untouched.
two.divideDestructive(one,remainder);
assertEquals(1L,one.asLong());
assertEquals(2L,two.asLong());
assertEquals(zero,remainder);
// 1 / 2 = 0 remainder 1.
one.divideDestructive(two,remainder);
assertEquals(0L,one.asLong());
assertEquals(2L,two.asLong());
assertEquals(new UnsignedInt128(1),remainder);
// long-range case: verify quotient and remainder against plain long math.
UnsignedInt128 var1=new UnsignedInt128(1234234662345L);
var1.divideDestructive(new UnsignedInt128(642337),remainder);
assertEquals(1234234662345L / 642337L,var1.asLong());
assertEquals(1234234662345L % 642337L,remainder.asLong());
// full multi-word case: cross-check against BigInteger.divide.
UnsignedInt128 complicated1=new UnsignedInt128(0xF9892FCA,0x59D109AD,0x0534AB4C,0x42395ADC);
UnsignedInt128 complicated2=new UnsignedInt128(0xF09DC19A,0x00001234,0,0);
BigInteger bigInteger1=complicated1.toBigIntegerSlow();
BigInteger bigInteger2=complicated2.toBigIntegerSlow();
complicated1.divideDestructive(complicated2,remainder);
BigInteger ans=bigInteger1.divide(bigInteger2);
assertEquals(ans,complicated1.toBigIntegerSlow());
try {
// division by zero must throw.
complicated1.divideDestructive(zero,remainder);
fail();
}
catch ( ArithmeticException ex) {
// expected
}
}
InternalCallVerifier EqualityVerifier
/**
 * Verifies the hashCode contract for these small values: equal values hash
 * equally, and the distinct fixtures zero/one/two happen to hash differently
 * (not required by the contract, but expected of a reasonable hash here).
 */
@Test public void testHashCode(){
assertNotEquals(one.hashCode(),two.hashCode());
assertNotEquals(zero.hashCode(),one.hashCode());
assertNotEquals(zero.hashCode(),two.hashCode());
assertEquals(zero.hashCode(),new UnsignedInt128(0).hashCode());
assertEquals(one.hashCode(),new UnsignedInt128(1).hashCode());
assertEquals(two.hashCode(),new UnsignedInt128(2).hashCode());
}
UtilityVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Exercises in-place multiplication by an int scalar: identity (x1), doubling,
 * a product that stays in one 32-bit word (6432346 * 5 = 32,161,730 fits in an
 * int), a multi-word carry case, and overflow past 128 bits (must throw).
 */
@Test public void testMultiplyDestructiveInt(){
two.multiplyDestructive(1);
assertEquals(2L,two.asLong());
assertEquals(1L,one.asLong());
two.multiplyDestructive(2);
assertEquals(4L,two.asLong());
UnsignedInt128 five=new UnsignedInt128(5);
five.multiplyDestructive(6432346);
assertEquals(6432346 * 5,five.getV0());
assertEquals(0,five.getV1());
assertEquals(0,five.getV2());
assertEquals(0,five.getV3());
// Multi-word case: expected per-word values below assume 96 = 32 * 3 so the
// bit-62 term carries into v2 — NOTE(review): derived from the operand layout,
// confirm against UnsignedInt128's word ordering (v0 = least significant).
UnsignedInt128 big=new UnsignedInt128((1L << 62) + (3L << 34) + 3L);
big.multiplyDestructive(96);
assertEquals(3 * 96,big.getV0());
assertEquals(96 * (3 << 2),big.getV1());
assertEquals(96 / 4,big.getV2());
assertEquals(0,big.getV3());
// 1 << 126, doubled once to 1 << 127 (still representable); doubling again
// overflows 128 bits and must throw.
UnsignedInt128 tmp=new UnsignedInt128(1);
tmp.shiftLeftDestructive(126);
tmp.multiplyDestructive(2);
try {
tmp.multiplyDestructive(2);
fail();
}
catch ( ArithmeticException ex) {
// expected: overflow beyond 128 bits
}
}
EqualityVerifier
/**
 * Verifies equals(): distinct fixture values compare unequal, and separately
 * constructed instances with the same value compare equal.
 */
@Test public void testEquals(){
assertNotEquals(one,two);
assertNotEquals(zero,one);
assertNotEquals(zero,two);
assertEquals(zero,new UnsignedInt128(0));
assertEquals(one,new UnsignedInt128(1));
assertEquals(two,new UnsignedInt128(2));
}
IterativeVerifier BranchVerifier InternalCallVerifier EqualityVerifier
/**
 * Exercises multiplyScaleDownTenDestructive across all scales 0..37:
 * multiplies a value by 10^scale (prepared via scaleUpTenDestructive) and then
 * scales the product down by 10^(scale+10), so the net effect divides the
 * original value by 10^10 with rounding. For each scale three neighbors are
 * tested (exact value, +1, -1) to probe rounding at the boundary. When
 * scale + 10 exceeds 38 digits the result is expected to collapse to 0 —
 * NOTE(review): 38 appears to be the supported decimal-digit limit, confirm
 * against UnsignedInt128's documented max precision.
 */
@Test public void testMultiplyScaleDownTenDestructiveScaleTen(){
for (int scale=0; scale < 38; ++scale) {
UnsignedInt128 right=new UnsignedInt128(1);
right.scaleUpTenDestructive((short)scale);
{
// Case 1: left = 10^15 exactly; 10^15 * 10^scale / 10^(scale+10) = 10^5.
UnsignedInt128 leftJust=new UnsignedInt128(1);
leftJust.scaleUpTenDestructive((short)15);
UnsignedInt128 leftInc=leftJust.incrementConstructive();
UnsignedInt128 leftDec=leftJust.decrementConstructive();
if (scale + 10 <= 38) {
leftJust.multiplyScaleDownTenDestructive(right,(short)(scale + 10));
assertEquals("scale=" + scale,100000L,leftJust.asLong());
// +-1 around an exact multiple rounds back to the same quotient.
leftInc.multiplyScaleDownTenDestructive(right,(short)(scale + 10));
assertEquals("scale=" + scale,100000L,leftInc.asLong());
leftDec.multiplyScaleDownTenDestructive(right,(short)(scale + 10));
assertEquals("scale=" + scale,100000L,leftDec.asLong());
}
else {
leftJust.multiplyScaleDownTenDestructive(right,(short)(scale + 10));
assertEquals("scale=" + scale,0L,leftJust.asLong());
leftInc.multiplyScaleDownTenDestructive(right,(short)(scale + 10));
assertEquals("scale=" + scale,0L,leftInc.asLong());
leftDec.multiplyScaleDownTenDestructive(right,(short)(scale + 10));
assertEquals("scale=" + scale,0L,leftDec.asLong());
}
}
{
// Case 2: left = (10^6 + 5) * 10^9, i.e. a value exactly halfway between
// two multiples of 10^10 — probes half-up rounding behavior.
UnsignedInt128 leftHalfJust=new UnsignedInt128(1);
leftHalfJust.scaleUpTenDestructive((short)6);
leftHalfJust.addDestructive(new UnsignedInt128(5));
leftHalfJust.scaleUpTenDestructive((short)9);
UnsignedInt128 leftHalfInc=leftHalfJust.incrementConstructive();
UnsignedInt128 leftHalfDec=leftHalfJust.decrementConstructive();
if (scale + 10 <= 38) {
// Exact half rounds up (100001), half-minus-one rounds down (100000).
leftHalfJust.multiplyScaleDownTenDestructive(right,(short)(scale + 10));
assertEquals("scale=" + scale,100001L,leftHalfJust.asLong());
leftHalfInc.multiplyScaleDownTenDestructive(right,(short)(scale + 10));
assertEquals("scale=" + scale,100001L,leftHalfInc.asLong());
leftHalfDec.multiplyScaleDownTenDestructive(right,(short)(scale + 10));
assertEquals("scale=" + scale,100000L,leftHalfDec.asLong());
}
else {
leftHalfJust.multiplyScaleDownTenDestructive(right,(short)(scale + 10));
assertEquals("scale=" + scale,0L,leftHalfJust.asLong());
leftHalfInc.multiplyScaleDownTenDestructive(right,(short)(scale + 10));
assertEquals("scale=" + scale,0L,leftHalfInc.asLong());
leftHalfDec.multiplyScaleDownTenDestructive(right,(short)(scale + 10));
assertEquals("scale=" + scale,0L,leftHalfDec.asLong());
}
}
}
}
UtilityVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Exercises in-place addition: the receiver is mutated while the argument is
 * preserved, repeated addition carries correctly across 32-bit words, and
 * adding two values of 2^127 overflows 128 bits (must throw).
 */
@Test public void testAddDestructive(){
one.addDestructive(two);
assertEquals(3L,one.asLong());
assertEquals(2L,two.asLong());
// Add (2^62 + 3) fifty-four times; check each word of the accumulated sum
// and that the addend `big` itself is unchanged.
UnsignedInt128 big=new UnsignedInt128((1L << 62) + 3L);
UnsignedInt128 tmp=new UnsignedInt128(0L);
for (int i=0; i < 54; ++i) {
tmp.addDestructive(big);
}
assertEquals(3 * 54,tmp.getV0());
assertEquals(0x80000000,tmp.getV1());
assertEquals(13,tmp.getV2());
assertEquals(0,tmp.getV3());
assertEquals((1L << 62) + 3L,big.asLong());
// 2^127 + 2^127 = 2^128 does not fit and must throw.
UnsignedInt128 huge=one.shiftLeftConstructive(127);
UnsignedInt128 huge2=one.shiftLeftConstructive(127);
try {
huge2.addDestructive(huge);
fail();
}
catch ( ArithmeticException ex) {
// expected: overflow beyond 128 bits
}
}
EqualityVerifier
/** Verifies the no-arg constructor produces zero. */
@Test public void testUnsignedInt128(){
assertEquals(0L,new UnsignedInt128().asLong());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies zeroClear() resets a value to zero in place without affecting other
 * instances, and that isZero() tracks the change.
 */
@Test public void testZeroClear(){
// Preconditions: both fixtures are non-zero.
assertFalse(one.isZero());
assertFalse(two.isZero());
assertNotEquals(0L,one.asLong());
assertNotEquals(0L,two.asLong());
// Clearing `two` must not disturb `one`.
two.zeroClear();
assertNotEquals(0L,one.asLong());
assertEquals(0L,two.asLong());
assertFalse(one.isZero());
assertTrue(two.isZero());
one.zeroClear();
assertEquals(0L,one.asLong());
assertEquals(0L,two.asLong());
assertTrue(one.isZero());
assertTrue(two.isZero());
}
APIUtilityVerifier UtilityVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Exercises in-place division by an int scalar: division by one, a zero
 * quotient, a long-range case checked against long arithmetic, a multi-word
 * case cross-checked against BigInteger, and division by zero (must throw).
 */
@Test public void testDivideDestructiveInt(){
two.divideDestructive(1);
assertEquals(1L,one.asLong());
assertEquals(2L,two.asLong());
// 1 / 2 truncates to 0.
one.divideDestructive(2);
assertEquals(0L,one.asLong());
assertEquals(2L,two.asLong());
UnsignedInt128 var1=new UnsignedInt128(1234234662345L);
var1.divideDestructive(642337);
assertEquals(1234234662345L / 642337L,var1.asLong());
// Multi-word dividend: cross-check the quotient against BigInteger.divide.
UnsignedInt128 complicated1=new UnsignedInt128(0xF9892FCA,0x59D109AD,0x0534AB4C,0);
BigInteger bigInteger1=complicated1.toBigIntegerSlow();
complicated1.divideDestructive(1534223465);
BigInteger bigInteger2=BigInteger.valueOf(1534223465);
BigInteger ans=bigInteger1.divide(bigInteger2);
assertEquals(ans,complicated1.toBigIntegerSlow());
try {
complicated1.divideDestructive(0);
fail();
}
catch ( ArithmeticException ex) {
// expected: division by zero
}
}
EqualityVerifier
/**
 * Verifies the four-word constructor's word order: (v0,v1,v2,v3) with v0 least
 * significant, so (23, 11, 0, 0) equals (11 << 32) | 23 as a long.
 */
@Test public void testUnsignedInt128IntIntIntInt(){
assertEquals(((long)11) << 32L | 23L,new UnsignedInt128(23,11,0,0).asLong());
}
EqualityVerifier
/**
 * Round-trips a value through the BigInteger constructor and back via
 * toBigIntegerSlow(), verifying no information is lost.
 */
@Test public void testBigIntConversion(){
final BigInteger original=BigInteger.valueOf(0x1ABCDEF0123456L);
final UnsignedInt128 converted=new UnsignedInt128(original);
final BigInteger roundTripped=converted.toBigIntegerSlow();
assertEquals(original,roundTripped);
}
Class: org.apache.hadoop.hive.conf.TestHiveConf UtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies hidden-configuration handling: password properties are reported as
 * hidden, the hidden-list property itself cannot be overridden at runtime, and
 * stripHiddenConfigurations blanks hidden values out of a Configuration copy.
 */
@Test public void testHiddenConfig() throws Exception {
HiveConf conf=new HiveConf();
Assert.assertTrue(conf.isHiddenConfig(HiveConf.ConfVars.METASTOREPWD.varname));
Assert.assertTrue(conf.isHiddenConfig(HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname));
try {
// The hidden-list key is itself protected; setting it must be rejected.
final String name=HiveConf.ConfVars.HIVE_CONF_HIDDEN_LIST.varname;
conf.verifyAndSet(name,"");
Assert.fail("Setting config property " + name + " should fail");
}
catch ( IllegalArgumentException e) {
// expected
}
// Populate hidden keys in a copy, then strip: values must come back empty.
Configuration conf2=new Configuration(conf);
conf2.set(HiveConf.ConfVars.METASTOREPWD.varname,"password");
conf2.set(HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname,"password");
conf.stripHiddenConfigurations(conf2);
Assert.assertEquals("",conf2.get(HiveConf.ConfVars.METASTOREPWD.varname));
Assert.assertEquals("",conf2.get(HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname));
}
EqualityVerifier
/**
 * Verifies HiveConf.unitFor maps every supported time-unit suffix (short and
 * long forms) to the corresponding TimeUnit.
 */
@Test public void testUnitFor() throws Exception {
// NOTE(review): the first two cases appear to exercise the fallback — when no
// recognized suffix matches, the supplied default unit is returned. Confirm
// against HiveConf.unitFor's implementation.
Assert.assertEquals(TimeUnit.SECONDS,HiveConf.unitFor("L",TimeUnit.SECONDS));
Assert.assertEquals(TimeUnit.MICROSECONDS,HiveConf.unitFor("",TimeUnit.MICROSECONDS));
Assert.assertEquals(TimeUnit.DAYS,HiveConf.unitFor("d",null));
Assert.assertEquals(TimeUnit.DAYS,HiveConf.unitFor("days",null));
Assert.assertEquals(TimeUnit.HOURS,HiveConf.unitFor("h",null));
Assert.assertEquals(TimeUnit.HOURS,HiveConf.unitFor("hours",null));
Assert.assertEquals(TimeUnit.MINUTES,HiveConf.unitFor("m",null));
Assert.assertEquals(TimeUnit.MINUTES,HiveConf.unitFor("minutes",null));
Assert.assertEquals(TimeUnit.SECONDS,HiveConf.unitFor("s",null));
Assert.assertEquals(TimeUnit.SECONDS,HiveConf.unitFor("seconds",null));
Assert.assertEquals(TimeUnit.MILLISECONDS,HiveConf.unitFor("ms",null));
Assert.assertEquals(TimeUnit.MILLISECONDS,HiveConf.unitFor("msecs",null));
Assert.assertEquals(TimeUnit.MICROSECONDS,HiveConf.unitFor("us",null));
Assert.assertEquals(TimeUnit.MICROSECONDS,HiveConf.unitFor("useconds",null));
Assert.assertEquals(TimeUnit.NANOSECONDS,HiveConf.unitFor("ns",null));
Assert.assertEquals(TimeUnit.NANOSECONDS,HiveConf.unitFor("nsecs",null));
}
APIUtilityVerifier EqualityVerifier
/**
 * Verifies that HiveConf locates hive-site.xml at the same path found on the
 * test classpath. Paths are lower-cased on Windows before comparison because
 * that filesystem is case-insensitive.
 */
@Test public void testHiveSitePath() throws Exception {
String expectedPath=HiveTestUtils.getFileFromClasspath("hive-site.xml");
String hiveSiteLocation=HiveConf.getHiveSiteLocation().getPath();
if (Shell.WINDOWS) {
expectedPath=expectedPath.toLowerCase();
hiveSiteLocation=hiveSiteLocation.toLowerCase();
}
Assert.assertEquals(expectedPath,hiveSiteLocation);
}
Class: org.apache.hadoop.hive.conf.TestVariableSubstitution InternalCallVerifier EqualityVerifier
/**
 * Verifies variable substitution against a live HiveVariableSource: an unknown
 * variable is left as-is ("${a}"), and once the source defines it, subsequent
 * substitutions resolve to the new value.
 */
@Test public void testVariableSource() throws InterruptedException {
final VariableSubstitution variableSubstitution=new VariableSubstitution(new HiveVariableSource(){
@Override public Map getHiveVariable(){
// Re-read the shared source on every call so later puts are visible.
return TestVariableSubstitution.getMySource().map;
}
}
);
// "a" is undefined: the placeholder must pass through unchanged.
String v=variableSubstitution.substitute(new HiveConf(),"${a}");
Assert.assertEquals("${a}",v);
// Define "a" and substitute again: now it resolves.
TestVariableSubstitution.getMySource().put("a","b");
v=variableSubstitution.substitute(new HiveConf(),"${a}");
Assert.assertEquals("b",v);
}
Class: org.apache.hadoop.hive.hbase.TestPutResultWritable InternalCallVerifier EqualityVerifier
/**
 * Round-trips a PutWritable through Writable serialization (via the copy()
 * helper) and verifies the row key and family map survive intact.
 */
@Test public void testPut() throws Exception {
byte[] row=Bytes.toBytes("test-row");
KeyValue[] kvs=new KeyValue[]{new KeyValue(row,Bytes.toBytes("cfa"),Bytes.toBytes("col1"),Bytes.toBytes("cfacol1")),new KeyValue(row,Bytes.toBytes("cfa"),Bytes.toBytes("col2"),Bytes.toBytes("cfacol2"))};
Put expected=new Put(row);
for (int i=0; i < kvs.length; i++) {
expected.add(kvs[i]);
}
// copy() serializes the first writable and deserializes into the second.
PutWritable actual=copy(new PutWritable(expected),new PutWritable());
Assert.assertArrayEquals(expected.getRow(),actual.getPut().getRow());
Assert.assertEquals(expected.getFamilyMap(),actual.getPut().getFamilyMap());
}
EqualityVerifier
/**
 * Round-trips a ResultWritable through Writable serialization (via the copy()
 * helper) and verifies the underlying KeyValue array is preserved.
 */
@Test public void testResult() throws Exception {
KeyValue[] kvs=new KeyValue[]{new KeyValue(Bytes.toBytes("test-row"),Bytes.toBytes("cfa"),Bytes.toBytes("col1"),Bytes.toBytes("cfacol1")),new KeyValue(Bytes.toBytes("test-row"),Bytes.toBytes("cfa"),Bytes.toBytes("col2"),Bytes.toBytes("cfacol2"))};
Result expected=new Result(kvs);
ResultWritable actual=copy(new ResultWritable(expected),new ResultWritable());
Assert.assertArrayEquals(expected.raw(),actual.getResult().raw());
}
Class: org.apache.hadoop.hive.hooks.TestHs2Hooks BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Test that hook context properties are correctly set.
 *
 * Runs two statements over a JDBC connection to a local HiveServer2, then
 * checks the static state captured by the pre/post execution hooks and the
 * semantic-analysis hook: user name, client IP, operation, and command.
 * Errors recorded by any hook are rethrown so the test fails with the root
 * cause.
 */
@Test public void testHookContexts() throws Throwable {
Properties connProp=new Properties();
connProp.setProperty("user",System.getProperty("user.name"));
connProp.setProperty("password","");
// NOTE(review): connection/statement are intentionally left open for the
// duration of the suite here; consider try-with-resources if hooks allow it.
HiveConnection connection=new HiveConnection("jdbc:hive2://localhost:10000/default",connProp);
Statement stmt=connection.createStatement();
stmt.executeQuery("show databases");
stmt.executeQuery("show tables");
// Surface any failure captured inside the hooks themselves.
Throwable error=PostExecHook.error;
if (error != null) {
throw error;
}
error=PreExecHook.error;
if (error != null) {
throw error;
}
Assert.assertEquals(System.getProperty("user.name"),PostExecHook.userName);
// BUG FIX: JUnit's assertNotNull(String message, Object object) takes the
// message FIRST. The original passed the value first, so the call asserted
// that the literal message string was non-null — a vacuous check that could
// never fail. All assertNotNull calls below are now message-first.
Assert.assertNotNull("ipaddress is null",PostExecHook.ipAddress);
Assert.assertNotNull("userName is null",PostExecHook.userName);
Assert.assertNotNull("operation is null",PostExecHook.operation);
Assert.assertTrue(PostExecHook.ipAddress,PostExecHook.ipAddress.contains("127.0.0.1"));
Assert.assertEquals("SHOWTABLES",PostExecHook.operation);
Assert.assertEquals(System.getProperty("user.name"),PreExecHook.userName);
Assert.assertNotNull("ipaddress is null",PreExecHook.ipAddress);
Assert.assertNotNull("userName is null",PreExecHook.userName);
Assert.assertNotNull("operation is null",PreExecHook.operation);
Assert.assertTrue(PreExecHook.ipAddress,PreExecHook.ipAddress.contains("127.0.0.1"));
Assert.assertEquals("SHOWTABLES",PreExecHook.operation);
error=SemanticAnalysisHook.preAnalyzeError;
if (error != null) {
throw error;
}
error=SemanticAnalysisHook.postAnalyzeError;
if (error != null) {
throw error;
}
Assert.assertNotNull("semantic hook context ipaddress is null",SemanticAnalysisHook.ipAddress);
Assert.assertNotNull("semantic hook context userName is null",SemanticAnalysisHook.userName);
Assert.assertNotNull("semantic hook context command is null",SemanticAnalysisHook.command);
Assert.assertTrue(SemanticAnalysisHook.ipAddress,SemanticAnalysisHook.ipAddress.contains("127.0.0.1"));
Assert.assertEquals("show tables",SemanticAnalysisHook.command);
}
Class: org.apache.hadoop.hive.llap.cache.TestIncrementalObjectSizeEstimator APIUtilityVerifier EqualityVerifier
/**
 * Exercises the incremental object-size estimator on simple JDK types and two
 * test structs, verifying that estimates compose additively: the size of a
 * struct with a field set equals the empty struct plus the field's own size,
 * and a cycle in a recursive struct does not inflate the estimate.
 */
@Test public void testSimpleTypes(){
JavaDataModel memModel=JavaDataModel.get();
int intSize=runEstimate(new Integer(0),memModel,null);
runEstimate(new String(""),memModel,"empty string");
runEstimate(new String("foobarzzzzzzzzzzzzzz"),memModel,null);
List list=new ArrayList(0);
runEstimate(list,memModel,"empty ArrayList");
list.add(new String("zzz"));
runEstimate(list,memModel,"ArrayList - one string");
list.add(new Integer(5));
list.add(new Integer(6));
int arrayListSize=runEstimate(list,memModel,"ArrayList - 3 elements");
LinkedHashSet list2=new LinkedHashSet(0);
runEstimate(list2,memModel,"empty LinkedHashSet");
list2.add(new String("zzzz"));
runEstimate(list2,memModel,"LinkedHashSet - one string");
list2.add(new Integer(7));
list2.add(new Integer(4));
int lhsSize=runEstimate(list2,memModel,"LinkedHashSet - 3 elements");
// Additivity: each populated field adds exactly its own estimated size.
Struct struct=new Struct();
int structSize=runEstimate(struct,memModel,"Struct - empty");
struct.i=10;
int structSize2=runEstimate(struct,memModel,"Struct - one reference");
assertEquals(intSize + structSize,structSize2);
struct.list=list;
int structSize3=runEstimate(struct,memModel,"Struct - with ArrayList");
assertEquals(arrayListSize + structSize2,structSize3);
struct.list2=list2;
int structSize4=runEstimate(struct,memModel,"Struct - with LinkedHashSet");
assertEquals(lhsSize + structSize3,structSize4);
// Recursion: two linked instances triple the size; introducing a back-edge
// (ring) must not change the estimate — each object is counted once.
Struct2 struct2=new Struct2();
int recSize1=runEstimate(struct2,memModel,"recursive struct - empty");
struct2.next=new Struct2();
struct2.top=new Struct2();
int recSize2=runEstimate(struct2,memModel,"recursive struct - no ring");
assertEquals(recSize1 * 3,recSize2);
struct2.next.prev=struct2;
int recSize3=runEstimate(struct2,memModel,"recursive struct - ring added");
assertEquals(recSize2,recSize3);
}
Class: org.apache.hadoop.hive.llap.cache.TestLowLevelCacheImpl APIUtilityVerifier InternalCallVerifier IdentityVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Exercises basic put/get of the low-level data cache: buffers cached for two
 * files are returned on lookup with refcounts bumped, and a second put that
 * collides with an existing range returns a bitmask marking the colliding
 * positions and substitutes the already-cached buffer into the caller's array.
 */
@Test public void testGetPut(){
LowLevelCacheImpl cache=new LowLevelCacheImpl(LlapDaemonCacheMetrics.create("test","1"),new DummyCachePolicy(),new DummyAllocator(),true,-1);
long fn1=1, fn2=2;
MemoryBuffer[] fakes=new MemoryBuffer[]{fb(),fb(),fb(),fb(),fb(),fb()};
verifyRefcount(fakes,1,1,1,1,1,1);
// Null return from putFileData means no collisions with cached data.
assertNull(cache.putFileData(fn1,drs(1,2),fbs(fakes,0,1),0,Priority.NORMAL,null));
assertNull(cache.putFileData(fn2,drs(1,2),fbs(fakes,2,3),0,Priority.NORMAL,null));
verifyCacheGet(cache,fn1,1,3,fakes[0],fakes[1]);
verifyCacheGet(cache,fn2,1,3,fakes[2],fakes[3]);
verifyCacheGet(cache,fn1,2,4,fakes[1],dr(3,4));
verifyRefcount(fakes,3,4,3,3,1,1);
// Colliding put: range 1 of fn1 already holds fakes[0]; the returned mask
// flags position 1 (bit value 2) and bufsDiff[1] is replaced by the cached
// buffer.
MemoryBuffer[] bufsDiff=fbs(fakes,4,5);
long[] mask=cache.putFileData(fn1,drs(3,1),bufsDiff,0,Priority.NORMAL,null);
assertEquals(1,mask.length);
assertEquals(2,mask[0]);
assertSame(fakes[0],bufsDiff[1]);
verifyRefcount(fakes,4,4,3,3,2,1);
verifyCacheGet(cache,fn1,1,4,fakes[0],fakes[1],fakes[4]);
verifyRefcount(fakes,5,5,3,3,3,1);
}
APIUtilityVerifier IterativeVerifier BranchVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier
/**
 * Multi-threaded stress test: three reader/writer tasks randomly get and put
 * cache ranges for two files while a fourth task concurrently evicts buffers,
 * until all reader/writer tasks finish. Each put tags its buffer with a fake
 * arena index derived from (file, offset) so gets can verify they received the
 * buffer cached for that exact position.
 */
@Test public void testMTTWithCleanup(){
final LowLevelCacheImpl cache=new LowLevelCacheImpl(LlapDaemonCacheMetrics.create("test","1"),new DummyCachePolicy(),new DummyAllocator(),true,1);
final long fn1=1, fn2=2;
final int offsetsToUse=8;
// cdlIn: all 4 workers signal readiness; cdlOut: main thread releases them.
final CountDownLatch cdlIn=new CountDownLatch(4), cdlOut=new CountDownLatch(1);
final AtomicInteger rdmsDone=new AtomicInteger(0);
Callable rdmCall=new Callable(){
public Long call(){
int gets=0, puts=0;
try {
Random rdm=new Random(1234 + Thread.currentThread().getId());
syncThreadStart(cdlIn,cdlOut);
for (int i=0; i < 20000; ++i) {
boolean isGet=rdm.nextBoolean(), isFn1=rdm.nextBoolean();
long fileName=isFn1 ? fn1 : fn2;
int fileIndex=isFn1 ? 1 : 2;
int count=rdm.nextInt(offsetsToUse);
if (isGet) {
int[] offsets=new int[count];
count=generateOffsets(offsetsToUse,rdm,offsets);
CreateHelper list=new CreateHelper();
// BUG FIX: the original loop read "for (int j=0; i < count; ++i)" —
// it tested and advanced the OUTER counter i while indexing with a
// never-incremented j, repeatedly adding offsets[0] and skipping
// outer iterations. The loop variable is j throughout.
for (int j=0; j < count; ++j) {
list.addOrMerge(offsets[j],offsets[j] + 1,true,false);
}
DiskRangeList iter=cache.getFileData(fileName,list.get(),0,testFactory,null,null);
int j=-1;
while (iter != null) {
++j;
if (!(iter instanceof CacheChunk)) {
iter=iter.next;
continue;
}
++gets;
// Verify the cached buffer belongs to this (file, offset) slot.
LlapDataBuffer result=(LlapDataBuffer)((CacheChunk)iter).getBuffer();
assertEquals(makeFakeArenaIndex(fileIndex,offsets[j]),result.arenaIndex);
cache.decRefBuffer(result);
iter=iter.next;
}
}
else {
DiskRange[] ranges=new DiskRange[count];
int[] offsets=new int[count];
for (int j=0; j < count; ++j) {
int next=rdm.nextInt(offsetsToUse);
ranges[j]=dr(next,next + 1);
offsets[j]=next;
}
MemoryBuffer[] buffers=new MemoryBuffer[count];
for (int j=0; j < offsets.length; ++j) {
LlapDataBuffer buf=LowLevelCacheImpl.allocateFake();
buf.arenaIndex=makeFakeArenaIndex(fileIndex,offsets[j]);
buffers[j]=buf;
}
long[] mask=cache.putFileData(fileName,ranges,buffers,0,Priority.NORMAL,null);
puts+=buffers.length;
long maskVal=0;
if (mask != null) {
assertEquals(1,mask.length);
maskVal=mask[0];
}
for (int j=0; j < offsets.length; ++j) {
LlapDataBuffer buf=(LlapDataBuffer)(buffers[j]);
if ((maskVal & 1) == 1) {
// Collision: putFileData swapped in the cached buffer, which must
// carry the same fake arena index for this slot.
assertEquals(makeFakeArenaIndex(fileIndex,offsets[j]),buf.arenaIndex);
}
maskVal>>=1;
cache.decRefBuffer(buf);
}
}
}
}
finally {
// Always count down so the eviction task can terminate.
rdmsDone.incrementAndGet();
}
// Pack both counters into one long: gets in the high 32 bits, puts low.
return (((long)gets) << 32) | puts;
}
private int makeFakeArenaIndex( int fileIndex, long offset){
return (int)((fileIndex << 16) + offset);
}
}
;
FutureTask evictionTask=new FutureTask(new Callable(){
public Integer call(){
boolean isFirstFile=false;
Random rdm=new Random(1234 + Thread.currentThread().getId());
int evictions=0;
syncThreadStart(cdlIn,cdlOut);
// Keep evicting until all three reader/writer tasks are done.
while (rdmsDone.get() < 3) {
DiskRangeList head=new DiskRangeList(0,offsetsToUse + 1);
isFirstFile=!isFirstFile;
long fileId=isFirstFile ? fn1 : fn2;
head=cache.getFileData(fileId,head,0,testFactory,null,null);
DiskRange[] results=head.listToArray();
int startIndex=rdm.nextInt(results.length), index=startIndex;
LlapDataBuffer victim=null;
do {
DiskRange r=results[index];
if (r instanceof CacheChunk) {
LlapDataBuffer result=(LlapDataBuffer)((CacheChunk)r).getBuffer();
cache.decRefBuffer(result);
// invalidate() succeeds only if no one else holds a reference.
if (victim == null && result.invalidate()) {
++evictions;
victim=result;
}
}
++index;
if (index == results.length) index=0;
}
while (index != startIndex);
if (victim == null) continue;
cache.notifyEvicted(victim);
}
return evictions;
}
}
);
FutureTask rdmTask1=new FutureTask(rdmCall), rdmTask2=new FutureTask(rdmCall), rdmTask3=new FutureTask(rdmCall);
Executor threadPool=Executors.newFixedThreadPool(4);
threadPool.execute(rdmTask1);
threadPool.execute(rdmTask2);
threadPool.execute(rdmTask3);
threadPool.execute(evictionTask);
try {
cdlIn.await();
cdlOut.countDown();
long result1=rdmTask1.get(), result2=rdmTask2.get(), result3=rdmTask3.get();
int evictions=evictionTask.get();
LOG.info("MTT test: task 1: " + descRdmTask(result1) + ", task 2: "+ descRdmTask(result2)+ ", task 3: "+ descRdmTask(result3)+ "; "+ evictions+ " evictions");
}
catch ( Throwable t) {
throw new RuntimeException(t);
}
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Verifies that a put over ranges whose cached buffers were evicted replaces
 * the stale entries: evicted slots accept the new buffers, while still-live
 * slots collide and are reported in the returned bitmask.
 */
@Test public void testStaleValueReplace(){
LowLevelCacheImpl cache=new LowLevelCacheImpl(LlapDaemonCacheMetrics.create("test","1"),new DummyCachePolicy(),new DummyAllocator(),true,-1);
long fn1=1, fn2=2;
MemoryBuffer[] fakes=new MemoryBuffer[]{fb(),fb(),fb(),fb(),fb(),fb(),fb(),fb(),fb()};
assertNull(cache.putFileData(fn1,drs(1,2,3),fbs(fakes,0,1,2),0,Priority.NORMAL,null));
assertNull(cache.putFileData(fn2,drs(1),fbs(fakes,3),0,Priority.NORMAL,null));
// Evict fn1's range 1 and fn2's range 1; their slots become replaceable.
evict(cache,fakes[0]);
evict(cache,fakes[3]);
// Ranges 2 and 3 still hold live buffers, so mask = 0b0110 = 6; range 1
// (evicted) and range 4 (new) take the fresh buffers fakes[4] and fakes[7].
long[] mask=cache.putFileData(fn1,drs(1,2,3,4),fbs(fakes,4,5,6,7),0,Priority.NORMAL,null);
assertEquals(1,mask.length);
assertEquals(6,mask[0]);
assertNull(cache.putFileData(fn2,drs(1),fbs(fakes,8),0,Priority.NORMAL,null));
verifyCacheGet(cache,fn1,1,5,fakes[4],fakes[1],fakes[2],fakes[7]);
}
InternalCallVerifier EqualityVerifier
/**
 * Two concerns in one test: (1) DiskRangeList.CreateHelper merge semantics —
 * adjacent ranges added with the merge flag coalesce into one entry, without
 * it they stay separate; (2) cache hit/request byte metrics — requested bytes
 * grow by the full span of every get, hit bytes only by the portion actually
 * cached.
 */
@Test public void testCacheMetrics(){
// All five adjacent ranges merged: one entry covering 500 bytes.
CreateHelper list=new CreateHelper();
list.addOrMerge(0,100,true,false);
list.addOrMerge(100,200,true,false);
list.addOrMerge(200,300,true,false);
list.addOrMerge(300,400,true,false);
list.addOrMerge(400,500,true,false);
assertEquals(1,list.get().listSize());
assertEquals(500,list.get().getTotalLength());
// No merging: five separate entries, same total length.
list=new CreateHelper();
list.addOrMerge(0,100,false,false);
list.addOrMerge(100,200,false,false);
list.addOrMerge(200,300,false,false);
list.addOrMerge(300,400,false,false);
list.addOrMerge(400,500,false,false);
assertEquals(5,list.get().listSize());
assertEquals(500,list.get().getTotalLength());
// One non-merging add in the middle splits the list into two entries.
list=new CreateHelper();
list.addOrMerge(0,100,true,false);
list.addOrMerge(100,200,true,false);
list.addOrMerge(200,300,false,false);
list.addOrMerge(300,400,true,false);
list.addOrMerge(400,500,true,false);
assertEquals(2,list.get().listSize());
assertEquals(500,list.get().getTotalLength());
// Metrics: cache 100+200+200 = 500 bytes across three ranges of file 1.
LlapDaemonCacheMetrics metrics=LlapDaemonCacheMetrics.create("test","1");
LowLevelCacheImpl cache=new LowLevelCacheImpl(metrics,new DummyCachePolicy(),new DummyAllocator(),true,-1);
long fn=1;
MemoryBuffer[] fakes=new MemoryBuffer[]{fb(),fb(),fb()};
cache.putFileData(fn,new DiskRange[]{dr(0,100),dr(300,500),dr(800,1000)},fakes,0,Priority.NORMAL,null);
// Puts alone do not touch the request/hit counters.
assertEquals(0,metrics.getCacheRequestedBytes());
assertEquals(0,metrics.getCacheHitBytes());
// Request [0,1000): 1000 requested, 500 of it cached.
list=new CreateHelper();
list.addOrMerge(0,1000,true,false);
cache.getFileData(fn,list.get(),0,testFactory,null,null);
assertEquals(1000,metrics.getCacheRequestedBytes());
assertEquals(500,metrics.getCacheHitBytes());
// Request [0,100): fully cached — +100 requested, +100 hit.
list=new CreateHelper();
list.addOrMerge(0,100,true,false);
cache.getFileData(fn,list.get(),0,testFactory,null,null);
assertEquals(1100,metrics.getCacheRequestedBytes());
assertEquals(600,metrics.getCacheHitBytes());
// Request exactly the three cached ranges — +500 requested, +500 hit.
list=new CreateHelper();
list.addOrMerge(0,100,true,false);
list.addOrMerge(300,500,true,false);
list.addOrMerge(800,1000,true,false);
cache.getFileData(fn,list.get(),0,testFactory,null,null);
assertEquals(1600,metrics.getCacheRequestedBytes());
assertEquals(1100,metrics.getCacheHitBytes());
// Mixed: [300,500) cached (+200 hit), [1000,2000) not cached.
list=new CreateHelper();
list.addOrMerge(300,500,true,false);
list.addOrMerge(1000,2000,true,false);
cache.getFileData(fn,list.get(),0,testFactory,null,null);
assertEquals(2800,metrics.getCacheRequestedBytes());
assertEquals(1300,metrics.getCacheHitBytes());
}
Class: org.apache.hadoop.hive.llap.cache.TestLowLevelLrfuCachePolicy BooleanVerifier InternalCallVerifier IdentityVerifier EqualityVerifier HybridVerifier
/**
 * Regression test for HIVE-12178 (wrong list status after eviction): with the
 * LRFU policy's internal listLock held by this thread, a concurrent
 * notifyLock on a locked buffer must not corrupt the buffer's list state.
 * Afterwards, reserving memory should evict buffer2 (the unlocked one), not
 * buffer1.
 */
@Test public void testRegression_HIVE_12178() throws Exception {
LOG.info("Testing wrong list status after eviction");
EvictionTracker et=new EvictionTracker();
int memSize=2;
Configuration conf=new Configuration();
// lambda = 1.0 makes the policy pure LRU-like; see LRFU policy config.
conf.setDouble(HiveConf.ConfVars.LLAP_LRFU_LAMBDA.varname,1.0f);
final LowLevelLrfuCachePolicy lrfu=new LowLevelLrfuCachePolicy(1,memSize,conf);
// Reach into the policy's private list lock via reflection to force the
// race window this regression depends on.
Field f=LowLevelLrfuCachePolicy.class.getDeclaredField("listLock");
f.setAccessible(true);
ReentrantLock listLock=(ReentrantLock)f.get(lrfu);
LowLevelCacheMemoryManager mm=new LowLevelCacheMemoryManager(memSize,lrfu,LlapDaemonCacheMetrics.create("test","1"));
lrfu.setEvictionListener(et);
final LlapDataBuffer buffer1=LowLevelCacheImpl.allocateFake();
LlapDataBuffer buffer2=LowLevelCacheImpl.allocateFake();
assertTrue(cache(mm,lrfu,et,buffer1));
assertTrue(cache(mm,lrfu,et,buffer2));
buffer1.incRef();
assertEquals(LlapCacheableBuffer.IN_LIST,buffer1.indexInHeap);
listLock.lock();
try {
// notifyLock runs on another thread while we hold listLock, reproducing
// the contended path from the original bug report.
Thread otherThread=new Thread(new Runnable(){
public void run(){
lrfu.notifyLock(buffer1);
}
}
);
otherThread.start();
otherThread.join();
}
finally {
listLock.unlock();
}
// With buffer1 locked, the reservation must evict buffer2.
mm.reserveMemory(1,false);
assertSame(buffer2,et.evicted.get(0));
unlock(lrfu,buffer1);
}
Class: org.apache.hadoop.hive.llap.cache.TestOrcMetadataCache InternalCallVerifier IdentityVerifier EqualityVerifier HybridVerifier
/**
 * Exercises put/get of the ORC metadata cache for file and stripe metadata:
 * a first put for a key stores and returns the new value (and allocates), a
 * duplicate put for the same key returns the already-cached instance without
 * a new allocation, and gets always return the cached instance.
 */
@Test public void testGetPut() throws Exception {
DummyMemoryManager mm=new DummyMemoryManager();
DummyCachePolicy cp=new DummyCachePolicy();
OrcMetadataCache cache=new OrcMetadataCache(mm,cp);
OrcFileMetadata ofm1=OrcFileMetadata.createDummy(1), ofm2=OrcFileMetadata.createDummy(2);
assertSame(ofm1,cache.putFileMetadata(ofm1));
assertEquals(1,mm.allocs);
assertSame(ofm2,cache.putFileMetadata(ofm2));
assertEquals(2,mm.allocs);
assertSame(ofm1,cache.getFileMetadata(1));
assertSame(ofm2,cache.getFileMetadata(2));
// Duplicate key: the original cached instance wins, no new allocation.
OrcFileMetadata ofm3=OrcFileMetadata.createDummy(1);
assertSame(ofm1,cache.putFileMetadata(ofm3));
assertEquals(2,mm.allocs);
assertSame(ofm1,cache.getFileMetadata(1));
// Same contract for stripe metadata.
OrcStripeMetadata osm1=OrcStripeMetadata.createDummy(1), osm2=OrcStripeMetadata.createDummy(2);
assertSame(osm1,cache.putStripeMetadata(osm1));
assertEquals(3,mm.allocs);
assertSame(osm2,cache.putStripeMetadata(osm2));
assertEquals(4,mm.allocs);
assertSame(osm1,cache.getStripeMetadata(osm1.getKey()));
assertSame(osm2,cache.getStripeMetadata(osm2.getKey()));
OrcStripeMetadata osm3=OrcStripeMetadata.createDummy(1);
assertSame(osm1,cache.putStripeMetadata(osm3));
assertEquals(4,mm.allocs);
assertSame(osm1,cache.getStripeMetadata(osm3.getKey()));
}
Class: org.apache.hadoop.hive.llap.daemon.impl.TestLlapDaemonProtocolServerImpl APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * End-to-end RPC round trip: starts an LlapProtocolServerImpl backed by a
 * mocked ContainerRunner that accepts all work, submits a request through a
 * real client, and verifies the ACCEPTED submission state comes back. The
 * server is always stopped in the finally block.
 */
@Test(timeout=10000) public void test() throws ServiceException, IOException {
LlapConfiguration daemonConf=new LlapConfiguration();
int rpcPort=HiveConf.getIntVar(daemonConf,ConfVars.LLAP_DAEMON_RPC_PORT);
int numHandlers=HiveConf.getIntVar(daemonConf,ConfVars.LLAP_DAEMON_RPC_NUM_HANDLERS);
ContainerRunner containerRunnerMock=mock(ContainerRunner.class);
LlapProtocolServerImpl server=new LlapProtocolServerImpl(numHandlers,containerRunnerMock,new AtomicReference(),new AtomicReference(),rpcPort,rpcPort + 1);
when(containerRunnerMock.submitWork(any(SubmitWorkRequestProto.class))).thenReturn(SubmitWorkResponseProto.newBuilder().setSubmissionState(SubmissionStateProto.ACCEPTED).build());
try {
server.init(new Configuration());
server.start();
InetSocketAddress serverAddr=server.getBindAddress();
LlapProtocolBlockingPB client=new LlapProtocolClientImpl(new Configuration(),serverAddr.getHostName(),serverAddr.getPort(),null,null);
SubmitWorkResponseProto responseProto=client.submitWork(null,SubmitWorkRequestProto.newBuilder().setAmHost("amhost").setAmPort(2000).build());
// FIX: JUnit's assertEquals takes (expected, actual); the original had them
// reversed, which produces a misleading failure message.
assertEquals(SubmissionStateProto.ACCEPTED.name(),responseProto.getSubmissionState().name());
}
finally {
server.stop();
}
}
Class: org.apache.hadoop.hive.llap.daemon.impl.TestQueryIdentifier IterativeVerifier BranchVerifier EqualityVerifier
/**
 * Verifies QueryIdentifier's equals() over all four (appId, dagId)
 * combinations: every identifier equals itself, differs from the other three,
 * and equals an independently constructed identifier with the same fields.
 */
@Test(timeout=5000) public void testEquality(){
String appIdString1="app1";
String appIdString2="app2";
int dagId1=1;
int dagId2=2;
QueryIdentifier[] queryIdentifiers=new QueryIdentifier[4];
queryIdentifiers[0]=new QueryIdentifier(appIdString1,dagId1);
queryIdentifiers[1]=new QueryIdentifier(appIdString1,dagId2);
queryIdentifiers[2]=new QueryIdentifier(appIdString2,dagId1);
queryIdentifiers[3]=new QueryIdentifier(appIdString2,dagId2);
// Pairwise comparison: equal only on the diagonal (i == j).
for (int i=0; i < 4; i++) {
for (int j=0; j < 4; j++) {
if (i == j) {
assertEquals(queryIdentifiers[i],queryIdentifiers[j]);
}
else {
assertNotEquals(queryIdentifiers[i],queryIdentifiers[j]);
}
}
}
// Fresh instances with identical fields must be equal (value semantics).
QueryIdentifier q11=new QueryIdentifier(appIdString1,dagId1);
QueryIdentifier q12=new QueryIdentifier(appIdString1,dagId2);
QueryIdentifier q21=new QueryIdentifier(appIdString2,dagId1);
QueryIdentifier q22=new QueryIdentifier(appIdString2,dagId2);
assertEquals(queryIdentifiers[0],q11);
assertEquals(queryIdentifiers[1],q12);
assertEquals(queryIdentifiers[2],q21);
assertEquals(queryIdentifiers[3],q22);
}
Class: org.apache.hadoop.hive.llap.daemon.impl.TestTaskExecutorService BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies that a finishable task preempts a running non-finishable one:
 * r1 (non-finishable) is started, then scheduling finishable r2 causes r1 to
 * be preempted; after r2 completes, both completion listeners fire and no
 * tasks remain known to the executor.
 * Note: "Preepts" in the method name is a long-standing typo ("Preempts");
 * left as-is since the name is the test's external identifier.
 */
@Test(timeout=10000) public void testFinishablePreeptsNonFinishable() throws InterruptedException {
// createMockRequest(id, ?, ?, canFinish, runtimeMs) — r1 cannot finish,
// r2 can. NOTE(review): middle parameter meanings inferred from usage,
// confirm against createMockRequest's signature.
MockRequest r1=createMockRequest(1,1,100,false,5000l);
MockRequest r2=createMockRequest(2,1,100,true,1000l);
TaskExecutorServiceForTest taskExecutorService=new TaskExecutorServiceForTest(1,2,ShortestJobFirstComparator.class.getName(),true);
taskExecutorService.init(new Configuration());
taskExecutorService.start();
try {
taskExecutorService.schedule(r1);
r1.awaitStart();
taskExecutorService.schedule(r2);
r2.awaitStart();
// Scheduling the finishable r2 preempts the running r1.
r1.awaitEnd();
assertTrue(r1.wasPreempted());
assertTrue(r1.hasFinished());
r2.complete();
r2.awaitEnd();
TaskExecutorServiceForTest.InternalCompletionListenerForTest icl1=taskExecutorService.getInternalCompletionListenerForTest(r1.getRequestId());
TaskExecutorServiceForTest.InternalCompletionListenerForTest icl2=taskExecutorService.getInternalCompletionListenerForTest(r2.getRequestId());
icl1.awaitCompletion();
icl2.awaitCompletion();
// Both tasks fully unregistered from the executor's bookkeeping.
assertEquals(0,taskExecutorService.knownTasks.size());
}
finally {
taskExecutorService.shutDown(false);
}
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Verifies wait-queue admission with 1 executor thread and a wait queue of size 2:
// once the slot and the queue are full, a non-preferable request is REJECTED, while
// a finishable request evicts a queued non-finishable one (EVICTED_OTHER), and the
// remaining requests drain in the expected order as earlier ones complete.
@Test(timeout=10000) public void testWaitQueuePreemption() throws InterruptedException {
// 4th arg is the canFinish flag: r1/r5 finishable, r2/r3/r4 not.
MockRequest r1=createMockRequest(1,1,100,true,20000l);
MockRequest r2=createMockRequest(2,1,200,false,20000l);
MockRequest r3=createMockRequest(3,1,300,false,20000l);
MockRequest r4=createMockRequest(4,1,400,false,20000l);
MockRequest r5=createMockRequest(5,1,500,true,20000l);
TaskExecutorServiceForTest taskExecutorService=new TaskExecutorServiceForTest(1,2,ShortestJobFirstComparator.class.getName(),true);
taskExecutorService.init(new Configuration());
taskExecutorService.start();
try {
// r1 occupies the single executor slot.
taskExecutorService.schedule(r1);
r1.awaitStart();
// r2 and r3 fill the wait queue.
Scheduler.SubmissionState submissionState=taskExecutorService.schedule(r2);
assertEquals(Scheduler.SubmissionState.ACCEPTED,submissionState);
submissionState=taskExecutorService.schedule(r3);
assertEquals(Scheduler.SubmissionState.ACCEPTED,submissionState);
// Queue full; r4 does not outrank anything queued, so it is rejected outright.
submissionState=taskExecutorService.schedule(r4);
assertEquals(Scheduler.SubmissionState.REJECTED,submissionState);
// r5 is finishable and evicts a queued non-finishable request (r3).
submissionState=taskExecutorService.schedule(r5);
assertEquals(Scheduler.SubmissionState.EVICTED_OTHER,submissionState);
// assertTrue is the idiomatic form of assertEquals(true, ...).
assertTrue(r3.wasPreempted());
TaskExecutorServiceForTest.InternalCompletionListenerForTest icl1=taskExecutorService.getInternalCompletionListenerForTest(r1.getRequestId());
assertEquals(3,taskExecutorService.knownTasks.size());
assertTrue(taskExecutorService.knownTasks.containsKey(r1.getRequestId()));
assertTrue(taskExecutorService.knownTasks.containsKey(r2.getRequestId()));
assertTrue(taskExecutorService.knownTasks.containsKey(r5.getRequestId()));
// Drain: completing r1 frees the slot; r5 (finishable) runs before r2.
r1.complete();
r1.awaitEnd();
icl1.awaitCompletion();
assertEquals(2,taskExecutorService.knownTasks.size());
assertTrue(taskExecutorService.knownTasks.containsKey(r2.getRequestId()));
assertTrue(taskExecutorService.knownTasks.containsKey(r5.getRequestId()));
r5.awaitStart();
TaskExecutorServiceForTest.InternalCompletionListenerForTest icl5=taskExecutorService.getInternalCompletionListenerForTest(r5.getRequestId());
r5.complete();
r5.awaitEnd();
icl5.awaitCompletion();
assertEquals(1,taskExecutorService.knownTasks.size());
assertTrue(taskExecutorService.knownTasks.containsKey(r2.getRequestId()));
r2.awaitStart();
TaskExecutorServiceForTest.InternalCompletionListenerForTest icl2=taskExecutorService.getInternalCompletionListenerForTest(r2.getRequestId());
r2.complete();
r2.awaitEnd();
icl2.awaitCompletion();
// Everything processed; no tasks should remain known to the service.
assertEquals(0,taskExecutorService.knownTasks.size());
}
finally {
taskExecutorService.shutDown(false);
}
}
EqualityVerifier
// Verifies PreemptionQueueComparator ordering: with all tasks non-finishable, the
// earliest-submitted task (r1) stays at the head, and takes drain in r1..r4 order.
@Test(timeout=5000) public void testPreemptionQueueComparator() throws InterruptedException {
TaskWrapper r1=createTaskWrapper(createSubmitWorkRequestProto(1,2,100),false,100000);
TaskWrapper r2=createTaskWrapper(createSubmitWorkRequestProto(2,4,200),false,100000);
TaskWrapper r3=createTaskWrapper(createSubmitWorkRequestProto(3,6,300),false,1000000);
TaskWrapper r4=createTaskWrapper(createSubmitWorkRequestProto(4,8,400),false,1000000);
// Typed queue instead of the raw BlockingQueue so the compiler checks element types.
BlockingQueue<TaskWrapper> queue=new PriorityBlockingQueue<>(4,new TaskExecutorService.PreemptionQueueComparator());
queue.offer(r1);
assertEquals(r1,queue.peek());
queue.offer(r2);
assertEquals(r1,queue.peek());
queue.offer(r3);
assertEquals(r1,queue.peek());
queue.offer(r4);
// Drain order matches submission order.
assertEquals(r1,queue.take());
assertEquals(r2,queue.take());
assertEquals(r3,queue.take());
assertEquals(r4,queue.take());
}
Class: org.apache.hadoop.hive.llap.daemon.impl.comparator.TestFirstInFirstOutComparator InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// Exercises FirstInFirstOutComparator across several finishability mixes, checking
// head-of-queue (peek), eviction victim (offer on a full queue returns the evicted
// element), and drain order (take). createRequest args are presumably
// (id, parallelism, withinDagPriority, startTime) — confirm against createRequest.
@Test public void testWaitQueueComparator() throws InterruptedException {
// Scenario 1: all non-finishable.
TaskWrapper r1=createTaskWrapper(createRequest(1,2,5,100),false,100000);
TaskWrapper r2=createTaskWrapper(createRequest(2,4,4,200),false,100000);
TaskWrapper r3=createTaskWrapper(createRequest(3,6,3,300),false,1000000);
TaskWrapper r4=createTaskWrapper(createRequest(4,8,2,400),false,1000000);
TaskWrapper r5=createTaskWrapper(createRequest(5,10,1,500),false,1000000);
// Typed queue instead of the raw type so the compiler checks element types.
EvictingPriorityBlockingQueue<TaskWrapper> queue=new EvictingPriorityBlockingQueue<>(new FirstInFirstOutComparator(),4);
assertNull(queue.offer(r1));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r2));
assertEquals(r2,queue.peek());
assertNull(queue.offer(r3));
assertEquals(r3,queue.peek());
assertNull(queue.offer(r4));
assertEquals(r4,queue.peek());
// Queue full: offering r5 evicts r1.
assertEquals(r1,queue.offer(r5));
assertEquals(r5,queue.take());
assertEquals(r4,queue.take());
assertEquals(r3,queue.take());
assertEquals(r2,queue.take());
// Scenario 2: all finishable — same ordering as scenario 1.
r1=createTaskWrapper(createRequest(1,2,5,100),true,100000);
r2=createTaskWrapper(createRequest(2,4,4,200),true,100000);
r3=createTaskWrapper(createRequest(3,6,3,300),true,1000000);
r4=createTaskWrapper(createRequest(4,8,2,400),true,1000000);
r5=createTaskWrapper(createRequest(5,10,1,500),true,1000000);
queue=new EvictingPriorityBlockingQueue<>(new FirstInFirstOutComparator(),4);
assertNull(queue.offer(r1));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r2));
assertEquals(r2,queue.peek());
assertNull(queue.offer(r3));
assertEquals(r3,queue.peek());
assertNull(queue.offer(r4));
assertEquals(r4,queue.peek());
assertEquals(r1,queue.offer(r5));
assertEquals(r5,queue.take());
assertEquals(r4,queue.take());
assertEquals(r3,queue.take());
assertEquals(r2,queue.take());
// Scenario 3: alternating finishability, parallelism 1 — finishable tasks rank ahead.
r1=createTaskWrapper(createRequest(1,1,5,100),true,100000);
r2=createTaskWrapper(createRequest(2,1,4,200),false,100000);
r3=createTaskWrapper(createRequest(3,1,3,300),true,1000000);
r4=createTaskWrapper(createRequest(4,1,2,400),false,1000000);
r5=createTaskWrapper(createRequest(5,10,1,500),true,1000000);
queue=new EvictingPriorityBlockingQueue<>(new FirstInFirstOutComparator(),4);
assertNull(queue.offer(r1));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r2));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r3));
assertEquals(r3,queue.peek());
assertNull(queue.offer(r4));
assertEquals(r3,queue.peek());
// Non-finishable r2 is the eviction victim.
assertEquals(r2,queue.offer(r5));
assertEquals(r5,queue.take());
assertEquals(r3,queue.take());
assertEquals(r1,queue.take());
assertEquals(r4,queue.take());
// Scenario 4: same mix with varying parallelism — same relative ordering.
r1=createTaskWrapper(createRequest(1,2,5,100),true,100000);
r2=createTaskWrapper(createRequest(2,4,4,200),false,100000);
r3=createTaskWrapper(createRequest(3,6,3,300),true,1000000);
r4=createTaskWrapper(createRequest(4,8,2,400),false,1000000);
r5=createTaskWrapper(createRequest(5,10,1,500),true,1000000);
queue=new EvictingPriorityBlockingQueue<>(new FirstInFirstOutComparator(),4);
assertNull(queue.offer(r1));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r2));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r3));
assertEquals(r3,queue.peek());
assertNull(queue.offer(r4));
assertEquals(r3,queue.peek());
assertEquals(r2,queue.offer(r5));
assertEquals(r5,queue.take());
assertEquals(r3,queue.take());
assertEquals(r1,queue.take());
assertEquals(r4,queue.take());
// Scenario 5: only r1 and r5 finishable.
r1=createTaskWrapper(createRequest(1,2,5,100),true,100000);
r2=createTaskWrapper(createRequest(2,4,4,200),false,100000);
r3=createTaskWrapper(createRequest(3,6,3,300),false,1000000);
r4=createTaskWrapper(createRequest(4,8,2,400),false,1000000);
r5=createTaskWrapper(createRequest(5,10,1,500),true,1000000);
queue=new EvictingPriorityBlockingQueue<>(new FirstInFirstOutComparator(),4);
assertNull(queue.offer(r1));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r2));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r3));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r4));
assertEquals(r1,queue.peek());
assertEquals(r2,queue.offer(r5));
assertEquals(r5,queue.take());
assertEquals(r1,queue.take());
assertEquals(r4,queue.take());
assertEquals(r3,queue.take());
// Scenario 6: only r1 non-finishable — r1 is evicted first.
r1=createTaskWrapper(createRequest(1,2,5,100),false,100000);
r2=createTaskWrapper(createRequest(2,4,4,200),true,100000);
r3=createTaskWrapper(createRequest(3,6,3,300),true,1000000);
r4=createTaskWrapper(createRequest(4,8,2,400),true,1000000);
r5=createTaskWrapper(createRequest(5,10,1,500),true,1000000);
queue=new EvictingPriorityBlockingQueue<>(new FirstInFirstOutComparator(),4);
assertNull(queue.offer(r1));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r2));
assertEquals(r2,queue.peek());
assertNull(queue.offer(r3));
assertEquals(r3,queue.peek());
assertNull(queue.offer(r4));
assertEquals(r4,queue.peek());
assertEquals(r1,queue.offer(r5));
assertEquals(r5,queue.take());
assertEquals(r4,queue.take());
assertEquals(r3,queue.take());
assertEquals(r2,queue.take());
// Scenario 7: r4 and r5 share within-dag priority 2 — tie broken in favor of r4.
r1=createTaskWrapper(createRequest(1,2,5,100),false,100000);
r2=createTaskWrapper(createRequest(2,4,4,200),true,100000);
r3=createTaskWrapper(createRequest(3,6,3,300),true,1000000);
r4=createTaskWrapper(createRequest(4,8,2,400),true,1000000);
r5=createTaskWrapper(createRequest(5,10,2,500),true,1000000);
queue=new EvictingPriorityBlockingQueue<>(new FirstInFirstOutComparator(),4);
assertNull(queue.offer(r1));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r2));
assertEquals(r2,queue.peek());
assertNull(queue.offer(r3));
assertEquals(r3,queue.peek());
assertNull(queue.offer(r4));
assertEquals(r4,queue.peek());
assertEquals(r1,queue.offer(r5));
assertEquals(r4,queue.take());
assertEquals(r5,queue.take());
assertEquals(r3,queue.take());
assertEquals(r2,queue.take());
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// Tasks with identical parallelism and start time must drain by within-dag
// priority (lower value first): r2(1), r3(5), r1(10).
@Test(timeout=5000) public void testWaitQueueComparatorWithinDagPriority() throws InterruptedException {
TaskWrapper r1=createTaskWrapper(createRequest(1,1,0,100,100,10),false,100000);
TaskWrapper r2=createTaskWrapper(createRequest(2,1,0,100,100,1),false,100000);
TaskWrapper r3=createTaskWrapper(createRequest(3,1,0,100,100,5),false,100000);
// Typed queue instead of the raw type so the compiler checks element types.
EvictingPriorityBlockingQueue<TaskWrapper> queue=new EvictingPriorityBlockingQueue<>(new FirstInFirstOutComparator(),4);
assertNull(queue.offer(r1));
assertNull(queue.offer(r2));
assertNull(queue.offer(r3));
assertEquals(r2,queue.take());
assertEquals(r3,queue.take());
assertEquals(r1,queue.take());
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// With equal within-dag priority, ordering follows completed/remaining task counts:
// r2 (fewest pending) drains first, then r3, then r1.
@Test(timeout=5000) public void testWaitQueueComparatorParallelism() throws InterruptedException {
TaskWrapper r1=createTaskWrapper(createRequest(1,10,3,100,100,1),false,100000);
TaskWrapper r2=createTaskWrapper(createRequest(2,10,7,100,100,1),false,100000);
TaskWrapper r3=createTaskWrapper(createRequest(3,10,5,100,100,1),false,100000);
// Typed queue instead of the raw type so the compiler checks element types.
EvictingPriorityBlockingQueue<TaskWrapper> queue=new EvictingPriorityBlockingQueue<>(new FirstInFirstOutComparator(),4);
assertNull(queue.offer(r1));
assertNull(queue.offer(r2));
assertNull(queue.offer(r3));
assertEquals(r2,queue.take());
assertEquals(r3,queue.take());
assertEquals(r1,queue.take());
}
Class: org.apache.hadoop.hive.llap.daemon.impl.comparator.TestShortestJobFirstComparator InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// Exercises ShortestJobFirstComparator across several finishability mixes, checking
// head-of-queue (peek), eviction victim (offer on a full queue returns the evicted
// element), and drain order (take). createSubmitWorkRequestProto args are presumably
// (id, parallelism, startTime) — confirm against the helper.
@Test(timeout=5000) public void testWaitQueueComparator() throws InterruptedException {
// Scenario 1: all non-finishable — shortest job (r1) stays at the head.
TaskWrapper r1=createTaskWrapper(createSubmitWorkRequestProto(1,2,100),false,100000);
TaskWrapper r2=createTaskWrapper(createSubmitWorkRequestProto(2,4,200),false,100000);
TaskWrapper r3=createTaskWrapper(createSubmitWorkRequestProto(3,6,300),false,1000000);
TaskWrapper r4=createTaskWrapper(createSubmitWorkRequestProto(4,8,400),false,1000000);
TaskWrapper r5=createTaskWrapper(createSubmitWorkRequestProto(5,10,500),false,1000000);
// Typed queue instead of the raw type so the compiler checks element types.
EvictingPriorityBlockingQueue<TaskWrapper> queue=new EvictingPriorityBlockingQueue<>(new ShortestJobFirstComparator(),4);
assertNull(queue.offer(r1));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r2));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r3));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r4));
assertEquals(r1,queue.peek());
// Queue full: r5 (longest job) cannot displace anything and bounces back.
assertEquals(r5,queue.offer(r5));
assertEquals(r1,queue.take());
assertEquals(r2,queue.take());
assertEquals(r3,queue.take());
assertEquals(r4,queue.take());
// Scenario 2: all finishable — same ordering as scenario 1.
r1=createTaskWrapper(createSubmitWorkRequestProto(1,2,100),true,100000);
r2=createTaskWrapper(createSubmitWorkRequestProto(2,4,200),true,100000);
r3=createTaskWrapper(createSubmitWorkRequestProto(3,6,300),true,1000000);
r4=createTaskWrapper(createSubmitWorkRequestProto(4,8,400),true,1000000);
r5=createTaskWrapper(createSubmitWorkRequestProto(5,10,500),true,1000000);
queue=new EvictingPriorityBlockingQueue<>(new ShortestJobFirstComparator(),4);
assertNull(queue.offer(r1));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r2));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r3));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r4));
assertEquals(r1,queue.peek());
assertEquals(r5,queue.offer(r5));
assertEquals(r1,queue.take());
assertEquals(r2,queue.take());
assertEquals(r3,queue.take());
assertEquals(r4,queue.take());
// Scenario 3: alternating finishability, parallelism 1 — finishable first; r5
// (finishable) displaces non-finishable r4.
r1=createTaskWrapper(createSubmitWorkRequestProto(1,1,100),true,100000);
r2=createTaskWrapper(createSubmitWorkRequestProto(2,1,200),false,100000);
r3=createTaskWrapper(createSubmitWorkRequestProto(3,1,300),true,1000000);
r4=createTaskWrapper(createSubmitWorkRequestProto(4,1,400),false,1000000);
r5=createTaskWrapper(createSubmitWorkRequestProto(5,10,500),true,1000000);
queue=new EvictingPriorityBlockingQueue<>(new ShortestJobFirstComparator(),4);
assertNull(queue.offer(r1));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r2));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r3));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r4));
assertEquals(r1,queue.peek());
assertEquals(r4,queue.offer(r5));
assertEquals(r1,queue.take());
assertEquals(r3,queue.take());
assertEquals(r5,queue.take());
assertEquals(r2,queue.take());
// Scenario 4: same mix with varying parallelism — same relative ordering.
r1=createTaskWrapper(createSubmitWorkRequestProto(1,2,100),true,100000);
r2=createTaskWrapper(createSubmitWorkRequestProto(2,4,200),false,100000);
r3=createTaskWrapper(createSubmitWorkRequestProto(3,6,300),true,1000000);
r4=createTaskWrapper(createSubmitWorkRequestProto(4,8,400),false,1000000);
r5=createTaskWrapper(createSubmitWorkRequestProto(5,10,500),true,1000000);
queue=new EvictingPriorityBlockingQueue<>(new ShortestJobFirstComparator(),4);
assertNull(queue.offer(r1));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r2));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r3));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r4));
assertEquals(r1,queue.peek());
assertEquals(r4,queue.offer(r5));
assertEquals(r1,queue.take());
assertEquals(r3,queue.take());
assertEquals(r5,queue.take());
assertEquals(r2,queue.take());
// Scenario 5: only r1 and r5 finishable — finishable pair drains before r2/r3.
r1=createTaskWrapper(createSubmitWorkRequestProto(1,2,100),true,100000);
r2=createTaskWrapper(createSubmitWorkRequestProto(2,4,200),false,100000);
r3=createTaskWrapper(createSubmitWorkRequestProto(3,6,300),false,1000000);
r4=createTaskWrapper(createSubmitWorkRequestProto(4,8,400),false,1000000);
r5=createTaskWrapper(createSubmitWorkRequestProto(5,10,500),true,1000000);
queue=new EvictingPriorityBlockingQueue<>(new ShortestJobFirstComparator(),4);
assertNull(queue.offer(r1));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r2));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r3));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r4));
assertEquals(r1,queue.peek());
assertEquals(r4,queue.offer(r5));
assertEquals(r1,queue.take());
assertEquals(r5,queue.take());
assertEquals(r2,queue.take());
assertEquals(r3,queue.take());
// Scenario 6: only r1 non-finishable — r1 is the eviction victim.
r1=createTaskWrapper(createSubmitWorkRequestProto(1,2,100),false,100000);
r2=createTaskWrapper(createSubmitWorkRequestProto(2,4,200),true,100000);
r3=createTaskWrapper(createSubmitWorkRequestProto(3,6,300),true,1000000);
r4=createTaskWrapper(createSubmitWorkRequestProto(4,8,400),true,1000000);
r5=createTaskWrapper(createSubmitWorkRequestProto(5,10,500),true,1000000);
queue=new EvictingPriorityBlockingQueue<>(new ShortestJobFirstComparator(),4);
assertNull(queue.offer(r1));
assertEquals(r1,queue.peek());
assertNull(queue.offer(r2));
assertEquals(r2,queue.peek());
assertNull(queue.offer(r3));
assertEquals(r2,queue.peek());
assertNull(queue.offer(r4));
assertEquals(r2,queue.peek());
assertEquals(r1,queue.offer(r5));
assertEquals(r2,queue.take());
assertEquals(r3,queue.take());
assertEquals(r4,queue.take());
assertEquals(r5,queue.take());
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// Tasks with identical parallelism and start time must drain by within-dag
// priority (lower value first): r2(1), r3(5), r1(10).
@Test(timeout=5000) public void testWaitQueueComparatorWithinDagPriority() throws InterruptedException {
TaskWrapper r1=createTaskWrapper(createSubmitWorkRequestProto(1,1,0,100,10),false,100000);
TaskWrapper r2=createTaskWrapper(createSubmitWorkRequestProto(2,1,0,100,1),false,100000);
TaskWrapper r3=createTaskWrapper(createSubmitWorkRequestProto(3,1,0,100,5),false,100000);
// Typed queue instead of the raw type so the compiler checks element types.
EvictingPriorityBlockingQueue<TaskWrapper> queue=new EvictingPriorityBlockingQueue<>(new ShortestJobFirstComparator(),4);
assertNull(queue.offer(r1));
assertNull(queue.offer(r2));
assertNull(queue.offer(r3));
assertEquals(r2,queue.take());
assertEquals(r3,queue.take());
assertEquals(r1,queue.take());
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// With equal within-dag priority, ordering follows completed/remaining task counts:
// r2 (fewest pending) drains first, then r3, then r1.
@Test(timeout=5000) public void testWaitQueueComparatorParallelism() throws InterruptedException {
TaskWrapper r1=createTaskWrapper(createSubmitWorkRequestProto(1,10,3,100,1),false,100000);
TaskWrapper r2=createTaskWrapper(createSubmitWorkRequestProto(2,10,7,100,1),false,100000);
TaskWrapper r3=createTaskWrapper(createSubmitWorkRequestProto(3,10,5,100,1),false,100000);
// Typed queue instead of the raw type so the compiler checks element types.
EvictingPriorityBlockingQueue<TaskWrapper> queue=new EvictingPriorityBlockingQueue<>(new ShortestJobFirstComparator(),4);
assertNull(queue.offer(r1));
assertNull(queue.offer(r2));
assertNull(queue.offer(r3));
assertEquals(r2,queue.take());
assertEquals(r3,queue.take());
assertEquals(r1,queue.take());
}
Class: org.apache.hadoop.hive.llap.tez.TestLlapDaemonProtocolClientProxy InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// Requests queued for two different nodes should both be submitted in a single
// process() pass, with exactly one invocation tracked per node and nothing
// skipped or disabled afterwards.
@Test(timeout=5000) public void testMultipleNodes(){
RequestManagerForTest requestManager=new RequestManagerForTest(1);
LlapNodeId nodeId1=LlapNodeId.getInstance("host1",1025);
LlapNodeId nodeId2=LlapNodeId.getInstance("host2",1025);
Message mockMessage=mock(Message.class);
LlapProtocolClientProxy.ExecuteRequestCallback mockExecuteRequestCallback=mock(LlapProtocolClientProxy.ExecuteRequestCallback.class);
requestManager.queueRequest(new CallableRequestForTest(nodeId1,mockMessage,mockExecuteRequestCallback));
requestManager.queueRequest(new CallableRequestForTest(nodeId2,mockMessage,mockExecuteRequestCallback));
requestManager.process();
assertEquals(2,requestManager.numSubmissionsCounters);
assertNotNull(requestManager.numInvocationsPerNode.get(nodeId1));
assertNotNull(requestManager.numInvocationsPerNode.get(nodeId2));
// Statically imported assertEquals, consistent with the rest of the method.
assertEquals(1,requestManager.numInvocationsPerNode.get(nodeId1).getValue().intValue());
assertEquals(1,requestManager.numInvocationsPerNode.get(nodeId2).getValue().intValue());
// Note: the original asserted currentLoopSkippedRequests.size() twice; the
// duplicate has been removed.
assertEquals(0,requestManager.currentLoopSkippedRequests.size());
assertEquals(0,requestManager.currentLoopDisabledNodes.size());
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// At most one in-flight invocation per node: a second request for the same node
// is skipped (node disabled for the loop) until requestFinished() is called.
@Test(timeout=5000) public void testSingleInvocationPerNode(){
RequestManagerForTest requestManager=new RequestManagerForTest(1);
LlapNodeId nodeId1=LlapNodeId.getInstance("host1",1025);
Message mockMessage=mock(Message.class);
LlapProtocolClientProxy.ExecuteRequestCallback mockExecuteRequestCallback=mock(LlapProtocolClientProxy.ExecuteRequestCallback.class);
// First request submits immediately.
requestManager.queueRequest(new CallableRequestForTest(nodeId1,mockMessage,mockExecuteRequestCallback));
requestManager.process();
assertEquals(1,requestManager.numSubmissionsCounters);
assertNotNull(requestManager.numInvocationsPerNode.get(nodeId1));
// Statically imported assertEquals, consistent with the rest of the method.
assertEquals(1,requestManager.numInvocationsPerNode.get(nodeId1).getValue().intValue());
assertEquals(0,requestManager.currentLoopSkippedRequests.size());
// Second request for the same node is skipped while the first is in flight.
requestManager.queueRequest(new CallableRequestForTest(nodeId1,mockMessage,mockExecuteRequestCallback));
requestManager.process();
assertEquals(1,requestManager.numSubmissionsCounters);
assertNotNull(requestManager.numInvocationsPerNode.get(nodeId1));
assertEquals(1,requestManager.numInvocationsPerNode.get(nodeId1).getValue().intValue());
assertEquals(1,requestManager.currentLoopSkippedRequests.size());
assertEquals(1,requestManager.currentLoopDisabledNodes.size());
assertTrue(requestManager.currentLoopDisabledNodes.contains(nodeId1));
// Once the node reports finished, the skipped request goes through.
requestManager.requestFinished(nodeId1);
requestManager.process();
assertEquals(2,requestManager.numSubmissionsCounters);
assertNotNull(requestManager.numInvocationsPerNode.get(nodeId1));
assertEquals(2,requestManager.numInvocationsPerNode.get(nodeId1).getValue().intValue());
assertEquals(0,requestManager.currentLoopSkippedRequests.size());
assertEquals(0,requestManager.currentLoopDisabledNodes.size());
assertFalse(requestManager.currentLoopDisabledNodes.contains(nodeId1));
}
Class: org.apache.hadoop.hive.llap.tezplugins.TestLlapTaskCommunicator InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// Exercises EntityTracker register/unregister for containers and task attempts,
// verifying that every internal map is emptied via each unregister path.
// (The original declared unused locals host2/host3; they have been removed.)
@Test(timeout=5000) public void testEntityTracker1(){
LlapTaskCommunicator.EntityTracker entityTracker=new LlapTaskCommunicator.EntityTracker();
String host1="host1";
int port=1451;
// Case 1: register a container, unregister the container.
ContainerId containerId101=constructContainerId(101);
entityTracker.registerContainer(containerId101,host1,port);
assertEquals(LlapNodeId.getInstance(host1,port),entityTracker.getNodeIdForContainer(containerId101));
entityTracker.unregisterContainer(containerId101);
assertNull(entityTracker.getContainerAttemptMapForNode(LlapNodeId.getInstance(host1,port)));
assertNull(entityTracker.getNodeIdForContainer(containerId101));
assertEquals(0,entityTracker.nodeMap.size());
assertEquals(0,entityTracker.attemptToNodeMap.size());
assertEquals(0,entityTracker.containerToNodeMap.size());
// Case 2: register a task attempt, unregister the task attempt.
ContainerId containerId1=constructContainerId(1);
TezTaskAttemptID taskAttemptId1=constructTaskAttemptId(1);
entityTracker.registerTaskAttempt(containerId1,taskAttemptId1,host1,port);
assertEquals(LlapNodeId.getInstance(host1,port),entityTracker.getNodeIdForContainer(containerId1));
assertEquals(LlapNodeId.getInstance(host1,port),entityTracker.getNodeIdForTaskAttempt(taskAttemptId1));
entityTracker.unregisterTaskAttempt(taskAttemptId1);
assertNull(entityTracker.getContainerAttemptMapForNode(LlapNodeId.getInstance(host1,port)));
assertNull(entityTracker.getNodeIdForContainer(containerId1));
assertNull(entityTracker.getNodeIdForTaskAttempt(taskAttemptId1));
assertEquals(0,entityTracker.nodeMap.size());
assertEquals(0,entityTracker.attemptToNodeMap.size());
assertEquals(0,entityTracker.containerToNodeMap.size());
// Case 3: register a task attempt, unregister via its container; the attempt
// mapping must be cleaned up too, and a late unregisterTaskAttempt is a no-op.
ContainerId containerId201=constructContainerId(201);
TezTaskAttemptID taskAttemptId201=constructTaskAttemptId(201);
entityTracker.registerTaskAttempt(containerId201,taskAttemptId201,host1,port);
assertEquals(LlapNodeId.getInstance(host1,port),entityTracker.getNodeIdForContainer(containerId201));
assertEquals(LlapNodeId.getInstance(host1,port),entityTracker.getNodeIdForTaskAttempt(taskAttemptId201));
entityTracker.unregisterContainer(containerId201);
assertNull(entityTracker.getContainerAttemptMapForNode(LlapNodeId.getInstance(host1,port)));
assertNull(entityTracker.getNodeIdForContainer(containerId201));
assertNull(entityTracker.getNodeIdForTaskAttempt(taskAttemptId201));
assertEquals(0,entityTracker.nodeMap.size());
assertEquals(0,entityTracker.attemptToNodeMap.size());
assertEquals(0,entityTracker.containerToNodeMap.size());
entityTracker.unregisterTaskAttempt(taskAttemptId201);
}
Class: org.apache.hadoop.hive.llap.tezplugins.TestLlapTaskSchedulerService InternalCallVerifier EqualityVerifier
// With forced locality (delay -1 presumably meaning "wait forever" — confirm
// against the wrapper), two priority-2 tasks fill HOST1; a priority-1 task then
// triggers preemption and is allocated once a preempted task is deallocated.
@Test(timeout=5000) public void testForcedLocalityPreemption() throws IOException, InterruptedException {
Priority priority1=Priority.newInstance(1);
Priority priority2=Priority.newInstance(2);
String[] hosts=new String[]{HOST1,HOST2};
String[] hostsH1=new String[]{HOST1};
String[] hostsH2=new String[]{HOST2};
// Uppercase L long literal — lowercase l reads as the digit 1.
TestTaskSchedulerServiceWrapper tsWrapper=new TestTaskSchedulerServiceWrapper(2000,hosts,1,1,-1L);
try {
Object task1="task1";
Object clientCookie1="cookie1";
Object task2="task2";
Object clientCookie2="cookie2";
Object task3="task3";
Object clientCookie3="cookie3";
Object task4="task4";
Object clientCookie4="cookie4";
tsWrapper.controlScheduler(true);
tsWrapper.allocateTask(task1,hostsH1,priority2,clientCookie1);
tsWrapper.allocateTask(task2,hostsH1,priority2,clientCookie2);
tsWrapper.allocateTask(task3,hostsH1,priority2,clientCookie3);
// Busy-wait until both HOST1 slots are allocated (task3 stays pending).
while (true) {
tsWrapper.signalSchedulerRun();
tsWrapper.awaitSchedulerRun();
if (tsWrapper.ts.dagStats.numLocalAllocations == 2) {
break;
}
}
verify(tsWrapper.mockAppCallback,never()).preemptContainer(any(ContainerId.class));
// Parameterized captor instead of the raw ArgumentCaptor type.
ArgumentCaptor<Object> argumentCaptor=ArgumentCaptor.forClass(Object.class);
verify(tsWrapper.mockAppCallback,times(2)).taskAllocated(argumentCaptor.capture(),any(Object.class),any(Container.class));
assertEquals(2,argumentCaptor.getAllValues().size());
assertEquals(task1,argumentCaptor.getAllValues().get(0));
assertEquals(task2,argumentCaptor.getAllValues().get(1));
reset(tsWrapper.mockAppCallback);
// Higher-priority task4 forces a preemption on HOST1.
tsWrapper.allocateTask(task4,hostsH1,priority1,clientCookie4);
while (true) {
tsWrapper.signalSchedulerRun();
tsWrapper.awaitSchedulerRun();
if (tsWrapper.ts.dagStats.numPreemptedTasks == 1) {
break;
}
}
verify(tsWrapper.mockAppCallback).preemptContainer(any(ContainerId.class));
// Releasing the preempted task frees capacity for task4.
tsWrapper.deallocateTask(task1,false,TaskAttemptEndReason.INTERNAL_PREEMPTION);
while (true) {
tsWrapper.signalSchedulerRun();
tsWrapper.awaitSchedulerRun();
if (tsWrapper.ts.dagStats.numTotalAllocations == 3) {
break;
}
}
verify(tsWrapper.mockAppCallback,times(1)).taskAllocated(eq(task4),eq(clientCookie4),any(Container.class));
}
finally {
tsWrapper.shutdown();
}
}
InternalCallVerifier EqualityVerifier
// A task submitted with no host preference (null hosts) must still be allocated,
// and counted under numAllocationsNoLocalityRequest.
@Test(timeout=5000) public void testSimpleNoLocalityAllocation() throws IOException, InterruptedException {
TestTaskSchedulerServiceWrapper wrapper=new TestTaskSchedulerServiceWrapper();
try {
Priority priority=Priority.newInstance(1);
Object task=new Object();
Object cookie=new Object();
wrapper.controlScheduler(true);
// null host array == no locality request.
wrapper.allocateTask(task,null,priority,cookie);
wrapper.signalSchedulerRun();
wrapper.awaitSchedulerRun();
verify(wrapper.mockAppCallback).taskAllocated(eq(task),eq(cookie),any(Container.class));
assertEquals(1,wrapper.ts.dagStats.numAllocationsNoLocalityRequest);
}
finally {
wrapper.shutdown();
}
}
InternalCallVerifier EqualityVerifier
// A task requesting a host the scheduler does not know about must still be
// allocated (falling back off-host) and must not starve tasks on known hosts.
@Test(timeout=5000) public void testForcedLocalityUnknownHost() throws IOException, InterruptedException {
Priority priority1=Priority.newInstance(1);
String[] hostsKnown=new String[]{HOST1};
String[] hostsUnknown=new String[]{HOST2};
// Uppercase L long literal — lowercase l reads as the digit 1.
TestTaskSchedulerServiceWrapper tsWrapper=new TestTaskSchedulerServiceWrapper(2000,hostsKnown,1,1,-1L);
try {
Object task1="task1";
Object clientCookie1="cookie1";
Object task2="task2";
Object clientCookie2="cookie2";
tsWrapper.controlScheduler(true);
// task1 asks for the unknown HOST2; task2 asks for the known HOST1.
tsWrapper.allocateTask(task1,hostsUnknown,priority1,clientCookie1);
tsWrapper.allocateTask(task2,hostsKnown,priority1,clientCookie2);
// Busy-wait until both allocations have happened.
while (true) {
tsWrapper.signalSchedulerRun();
tsWrapper.awaitSchedulerRun();
if (tsWrapper.ts.dagStats.numTotalAllocations == 2) {
break;
}
}
// Parameterized captor instead of the raw ArgumentCaptor type.
ArgumentCaptor<Object> argumentCaptor=ArgumentCaptor.forClass(Object.class);
verify(tsWrapper.mockAppCallback,times(2)).taskAllocated(argumentCaptor.capture(),any(Object.class),any(Container.class));
assertEquals(2,argumentCaptor.getAllValues().size());
// Allocation order follows submission order.
assertEquals(task1,argumentCaptor.getAllValues().get(0));
assertEquals(task2,argumentCaptor.getAllValues().get(1));
}
finally {
tsWrapper.shutdown();
}
}
InternalCallVerifier EqualityVerifier
// After tasks are rejected, their nodes are temporarily disabled; with a short
// re-enable timeout, subsequent tasks for the same hosts must still be allocated
// once the nodes come back.
@Test(timeout=5000) public void testNodeReEnabled() throws InterruptedException, IOException {
// Uppercase L long literal — lowercase l reads as the digit 1. The argument is
// presumably the node re-enable timeout in ms — confirm against the wrapper.
TestTaskSchedulerServiceWrapper tsWrapper=new TestTaskSchedulerServiceWrapper(1000L);
try {
Priority priority1=Priority.newInstance(1);
String[] hosts1=new String[]{HOST1};
String[] hosts2=new String[]{HOST2};
String[] hosts3=new String[]{HOST3};
Object task1=new Object();
Object clientCookie1=new Object();
Object task2=new Object();
Object clientCookie2=new Object();
Object task3=new Object();
Object clientCookie3=new Object();
tsWrapper.controlScheduler(true);
tsWrapper.allocateTask(task1,hosts1,priority1,clientCookie1);
tsWrapper.allocateTask(task2,hosts2,priority1,clientCookie2);
tsWrapper.allocateTask(task3,hosts3,priority1,clientCookie3);
// Busy-wait until all three tasks are placed, one per host.
while (true) {
tsWrapper.signalSchedulerRun();
tsWrapper.awaitSchedulerRun();
if (tsWrapper.ts.dagStats.numTotalAllocations == 3) {
break;
}
}
verify(tsWrapper.mockAppCallback,times(3)).taskAllocated(any(Object.class),any(Object.class),any(Container.class));
assertEquals(3,tsWrapper.ts.dagStats.numLocalAllocations);
assertEquals(0,tsWrapper.ts.dagStats.numAllocationsNoLocalityRequest);
assertEquals(3,tsWrapper.ts.dagStats.numTotalAllocations);
tsWrapper.resetAppCallback();
// Rejecting all three disables all three nodes.
tsWrapper.rejectExecution(task1);
tsWrapper.rejectExecution(task2);
tsWrapper.rejectExecution(task3);
assertEquals(3,tsWrapper.ts.dagStats.numRejectedTasks);
assertEquals(3,tsWrapper.ts.instanceToNodeMap.size());
assertEquals(3,tsWrapper.ts.disabledNodesQueue.size());
Object task4=new Object();
Object clientCookie4=new Object();
Object task5=new Object();
Object clientCookie5=new Object();
Object task6=new Object();
Object clientCookie6=new Object();
tsWrapper.allocateTask(task4,hosts1,priority1,clientCookie4);
tsWrapper.allocateTask(task5,hosts2,priority1,clientCookie5);
tsWrapper.allocateTask(task6,hosts3,priority1,clientCookie6);
// The nodes re-enable after the timeout and the new tasks get placed.
while (true) {
tsWrapper.signalSchedulerRun();
tsWrapper.awaitSchedulerRun();
if (tsWrapper.ts.dagStats.numTotalAllocations == 6) {
break;
}
}
// Parameterized captor instead of the raw ArgumentCaptor type.
ArgumentCaptor<Container> argumentCaptor=ArgumentCaptor.forClass(Container.class);
verify(tsWrapper.mockAppCallback,times(3)).taskAllocated(any(Object.class),any(Object.class),argumentCaptor.capture());
assertEquals(0,tsWrapper.ts.dagStats.numAllocationsNoLocalityRequest);
assertEquals(6,tsWrapper.ts.dagStats.numTotalAllocations);
}
finally {
tsWrapper.shutdown();
}
}
InternalCallVerifier EqualityVerifier
// With a single node offering 2 slots (wait queue 1), two priority-2 tasks run;
// a later priority-1 task triggers preemption, and is allocated once the
// preempted task is deallocated. The busy-wait loops synchronize on dagStats
// counters, so statement order here is timing-critical.
@Test(timeout=5000) public void testPreemption() throws InterruptedException, IOException {
Priority priority1=Priority.newInstance(1);
Priority priority2=Priority.newInstance(2);
String[] hosts=new String[]{HOST1};
TestTaskSchedulerServiceWrapper tsWrapper=new TestTaskSchedulerServiceWrapper(2000,hosts,1,1);
try {
Object task1="task1";
Object clientCookie1="cookie1";
Object task2="task2";
Object clientCookie2="cookie2";
Object task3="task3";
Object clientCookie3="cookie3";
Object task4="task4";
Object clientCookie4="cookie4";
tsWrapper.controlScheduler(true);
// Three priority-2 tasks for one host; only two can be allocated.
tsWrapper.allocateTask(task1,hosts,priority2,clientCookie1);
tsWrapper.allocateTask(task2,hosts,priority2,clientCookie2);
tsWrapper.allocateTask(task3,hosts,priority2,clientCookie3);
// Busy-wait until both slots are filled (task3 remains pending).
while (true) {
tsWrapper.signalSchedulerRun();
tsWrapper.awaitSchedulerRun();
if (tsWrapper.ts.dagStats.numLocalAllocations == 2) {
break;
}
}
verify(tsWrapper.mockAppCallback,times(2)).taskAllocated(any(Object.class),any(Object.class),any(Container.class));
assertEquals(2,tsWrapper.ts.dagStats.numLocalAllocations);
assertEquals(0,tsWrapper.ts.dagStats.numAllocationsNoLocalityRequest);
// Reset the mock so later verifications only see post-preemption interactions.
reset(tsWrapper.mockAppCallback);
// Higher-priority task4 forces a preemption.
tsWrapper.allocateTask(task4,hosts,priority1,clientCookie4);
while (true) {
tsWrapper.signalSchedulerRun();
tsWrapper.awaitSchedulerRun();
if (tsWrapper.ts.dagStats.numPreemptedTasks == 1) {
break;
}
}
verify(tsWrapper.mockAppCallback).preemptContainer(any(ContainerId.class));
// Releasing the preempted task frees the slot for task4.
tsWrapper.deallocateTask(task2,false,TaskAttemptEndReason.INTERNAL_PREEMPTION);
while (true) {
tsWrapper.signalSchedulerRun();
tsWrapper.awaitSchedulerRun();
if (tsWrapper.ts.dagStats.numTotalAllocations == 3) {
break;
}
}
verify(tsWrapper.mockAppCallback,times(1)).taskAllocated(eq(task4),eq(clientCookie4),any(Container.class));
}
finally {
tsWrapper.shutdown();
}
}
InternalCallVerifier EqualityVerifier NullVerifier PublicFieldVerifier HybridVerifier
/**
 * After a task is rejected, its node is placed on the disabled queue for the
 * configured interval (10000 ms, per the expireTimeMillis assertion below),
 * and a subsequent request for that host is satisfied by another node,
 * counted as a non-local allocation.
 */
@Test(timeout=5000) public void testNodeDisabled() throws IOException, InterruptedException {
TestTaskSchedulerServiceWrapper tsWrapper=new TestTaskSchedulerServiceWrapper(10000l);
try {
Priority priority1=Priority.newInstance(1);
String[] hosts1=new String[]{HOST1};
Object task1=new Object();
Object clientCookie1=new Object();
// Drive the scheduler manually so each signal/await pair is one deterministic cycle.
tsWrapper.controlScheduler(true);
tsWrapper.allocateTask(task1,hosts1,priority1,clientCookie1);
// Run scheduler cycles until the first allocation completes.
while (true) {
tsWrapper.signalSchedulerRun();
tsWrapper.awaitSchedulerRun();
if (tsWrapper.ts.dagStats.numTotalAllocations == 1) {
break;
}
}
verify(tsWrapper.mockAppCallback).taskAllocated(eq(task1),eq(clientCookie1),any(Container.class));
// First allocation is local to the requested host.
assertEquals(1,tsWrapper.ts.dagStats.numLocalAllocations);
assertEquals(0,tsWrapper.ts.dagStats.numAllocationsNoLocalityRequest);
assertEquals(0,tsWrapper.ts.dagStats.numNonLocalAllocations);
assertEquals(1,tsWrapper.ts.dagStats.numTotalAllocations);
tsWrapper.resetAppCallback();
// Reject the task at t=10000; HOST1 should be moved to the disabled queue.
tsWrapper.clock.setTime(10000l);
tsWrapper.rejectExecution(task1);
assertEquals(1,tsWrapper.ts.dagStats.numRejectedTasks);
assertEquals(3,tsWrapper.ts.instanceToNodeMap.size());
LlapTaskSchedulerService.NodeInfo disabledNodeInfo=tsWrapper.ts.disabledNodesQueue.peek();
assertNotNull(disabledNodeInfo);
assertEquals(HOST1,disabledNodeInfo.serviceInstance.getHost());
// Disabled for 10000 ms from the current clock time (10000 + 10000).
assertEquals((10000l),disabledNodeInfo.getDelay(TimeUnit.MILLISECONDS));
assertEquals((10000l + 10000l),disabledNodeInfo.expireTimeMillis);
// A new request for the disabled HOST1 must be served elsewhere (non-local).
Object task2=new Object();
Object clientCookie2=new Object();
tsWrapper.allocateTask(task2,hosts1,priority1,clientCookie2);
while (true) {
tsWrapper.signalSchedulerRun();
tsWrapper.awaitSchedulerRun();
if (tsWrapper.ts.dagStats.numTotalAllocations == 2) {
break;
}
}
verify(tsWrapper.mockAppCallback).taskAllocated(eq(task2),eq(clientCookie2),any(Container.class));
assertEquals(1,tsWrapper.ts.dagStats.numLocalAllocations);
assertEquals(0,tsWrapper.ts.dagStats.numAllocationsNoLocalityRequest);
assertEquals(1,tsWrapper.ts.dagStats.numNonLocalAllocations);
assertEquals(2,tsWrapper.ts.dagStats.numTotalAllocations);
}
finally {
tsWrapper.shutdown();
}
}
InternalCallVerifier EqualityVerifier
/** A single task requesting HOST1 is allocated locally within one scheduler cycle. */
@Test(timeout=5000) public void testSimpleLocalAllocation() throws IOException, InterruptedException {
  TestTaskSchedulerServiceWrapper wrapper=new TestTaskSchedulerServiceWrapper();
  try {
    Object task=new Object();
    Object cookie=new Object();
    // Drive the scheduler manually: one signal/await pair is one cycle.
    wrapper.controlScheduler(true);
    wrapper.allocateTask(task,new String[]{HOST1},Priority.newInstance(1),cookie);
    wrapper.signalSchedulerRun();
    wrapper.awaitSchedulerRun();
    verify(wrapper.mockAppCallback).taskAllocated(eq(task),eq(cookie),any(Container.class));
    assertEquals(1,wrapper.ts.dagStats.numLocalAllocations);
  }
  finally {
    wrapper.shutdown();
  }
}
Class: org.apache.hadoop.hive.metastore.TestAggregateStatsCache APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Stats cached for partitions 1-9 are returned verbatim on an exact-match
 * lookup, while a lookup against an unknown database misses.
 */
@Test public void testBasicAddAndGet() throws Exception {
  String tblName=tables.get(0);
  String colName=tabCols.get(0);
  List partNames=preparePartNames(tblName,1,9);
  BloomFilter bloomFilter=prepareBloomFilter(partNames);
  // Dummy long stats: high=100, low=10, ndv=50, nulls=5.
  ColumnStatisticsObj stored=getDummyLongColStat(colName,100,10,50,5);
  cache.add(DB_NAME,tblName,colName,10,stored,bloomFilter);
  // Exact-match read returns the very stats object that was stored.
  AggrColStats cached=cache.get(DB_NAME,tblName,colName,partNames);
  Assert.assertNotNull(cached);
  Assert.assertEquals(stored,cached.getColStats());
  // A database never added to the cache must miss.
  Assert.assertNull(cache.get("dbNotThere",tblName,colName,partNames));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Partition-set overlap behaviour: an entry cached for partitions 1-9 misses
 * for requests 1-5 and 11-20, but hits for 1-8 (within allowed variance).
 */
@Test public void testAddGetWithVariance() throws Exception {
  String tblName=tables.get(0);
  String colName=tabCols.get(0);
  List cachedParts=preparePartNames(tblName,1,9);
  BloomFilter bloomFilter=prepareBloomFilter(cachedParts);
  // Dummy long stats: high=100, low=10, ndv=50, nulls=5.
  ColumnStatisticsObj stored=getDummyLongColStat(colName,100,10,50,5);
  cache.add(DB_NAME,tblName,colName,10,stored,bloomFilter);
  // Requesting a much smaller subset (1-5) misses.
  Assert.assertNull(cache.get(DB_NAME,tblName,colName,preparePartNames(tblName,1,5)));
  // Requesting mostly different partitions (11-20) misses.
  Assert.assertNull(cache.get(DB_NAME,tblName,colName,preparePartNames(tblName,11,20)));
  // Requesting 1-8 is close enough to the cached 1-9 set: hit.
  AggrColStats hit=cache.get(DB_NAME,tblName,colName,preparePartNames(tblName,1,8));
  Assert.assertNotNull(hit);
  Assert.assertEquals(stored,hit.getColStats());
}
EqualityVerifier
/** Cache keys with identical (db, table, column) compare equal; a differing table name does not. */
@Test public void testCacheKey(){
  Assert.assertEquals(new Key("db","tbl1","col"),new Key("db","tbl1","col"));
  Assert.assertNotEquals(new Key("db","tbl1","col"),new Key("db","tbl2","col"));
}
Class: org.apache.hadoop.hive.metastore.TestFilterHooks UtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/** With the blocking filter hook, partition lookup throws and listing by name returns nothing. */
@Test public void testDummyFilterForPartition() throws Exception {
  DummyMetaStoreFilterHookImpl.blockResults=true;
  try {
    assertNotNull(msc.getPartition(DBNAME1,TAB2,"name=value1"));
    fail("getPartition() should fail with blocking mode");
  }
  catch ( NoSuchObjectException expected) {
    // The hook filters the partition out, surfacing as NoSuchObjectException.
  }
  // The batched variant silently returns an empty result instead of throwing.
  assertEquals(0,msc.getPartitionsByNames(DBNAME1,TAB2,Lists.newArrayList("name=value1")).size());
}
UtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/** With the blocking filter hook, database lookup throws and all database listings come back empty. */
@Test public void testDummyFilterForDb() throws Exception {
  DummyMetaStoreFilterHookImpl.blockResults=true;
  try {
    assertNotNull(msc.getDatabase(DBNAME1));
    fail("getDatabase() should fail with blocking mode");
  }
  catch ( NoSuchObjectException expected) {
    // The hook filters the database out, surfacing as NoSuchObjectException.
  }
  // Listing variants return empty results instead of throwing.
  assertEquals(0,msc.getDatabases("*").size());
  assertEquals(0,msc.getAllDatabases().size());
  assertEquals(0,msc.getDatabases(DBNAME1).size());
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * With the default (non-blocking) filter, every lookup and listing returns the
 * fixture's objects: 3 tables and 3 databases match the wildcard, TAB2 has one
 * partition "name=value1", and TAB1 has INDEX1.
 */
@Test public void testDefaultFilter() throws Exception {
// Table lookups and listings are unfiltered.
assertNotNull(msc.getTable(DBNAME1,TAB1));
assertEquals(3,msc.getTables(DBNAME1,"*").size());
assertEquals(3,msc.getAllTables(DBNAME1).size());
assertEquals(1,msc.getTables(DBNAME1,TAB2).size());
assertEquals(0,msc.getAllTables(DBNAME2).size());
// Database lookups and listings are unfiltered.
assertNotNull(msc.getDatabase(DBNAME1));
assertEquals(3,msc.getDatabases("*").size());
assertEquals(3,msc.getAllDatabases().size());
assertEquals(1,msc.getDatabases(DBNAME1).size());
// Partition and index lookups are unfiltered.
assertNotNull(msc.getPartition(DBNAME1,TAB2,"name=value1"));
assertEquals(1,msc.getPartitionsByNames(DBNAME1,TAB2,Lists.newArrayList("name=value1")).size());
assertNotNull(msc.getIndex(DBNAME1,TAB1,INDEX1));
}
UtilityVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/** With the blocking filter hook, table lookup throws and all table listings come back empty. */
@Test public void testDummyFilterForTables() throws Exception {
  DummyMetaStoreFilterHookImpl.blockResults=true;
  try {
    msc.getTable(DBNAME1,TAB1);
    fail("getTable() should fail with blocking mode");
  }
  catch ( NoSuchObjectException expected) {
    // The hook filters the table out, surfacing as NoSuchObjectException.
  }
  // Listing variants return empty results instead of throwing.
  assertEquals(0,msc.getTables(DBNAME1,"*").size());
  assertEquals(0,msc.getAllTables(DBNAME1).size());
  assertEquals(0,msc.getTables(DBNAME1,TAB2).size());
}
Class: org.apache.hadoop.hive.metastore.TestHiveMetaStore InternalCallVerifier EqualityVerifier
/** The default database is owned by the PUBLIC principal with ROLE owner type. */
@Test public void testDBOwner() throws NoSuchObjectException, MetaException, TException {
  Database db=client.getDatabase(MetaStoreUtils.DEFAULT_DATABASE_NAME);
  // JUnit's assertEquals takes (expected, actual); the original calls had the
  // arguments reversed, which produces misleading failure messages.
  assertEquals(HiveMetaStore.PUBLIC,db.getOwnerName());
  assertEquals(PrincipalType.ROLE,db.getOwnerType());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Test table objects can be retrieved in batches
 * @throws Exception
 */
@Test public void testGetTableObjects() throws Exception {
  String dbName="db";
  List<String> tableNames=Arrays.asList("table1","table2","table3","table4","table5");
  // Start from a clean slate, then create one table per name.
  silentDropDatabase(dbName);
  Database db=new Database();
  db.setName(dbName);
  client.createDatabase(db);
  for ( String name : tableNames) {
    createTable(dbName,name);
  }
  // A single batched fetch must return exactly the created tables.
  List<Table> fetched=client.getTableObjectsByName(dbName,tableNames);
  assertEquals(tableNames.size(),fetched.size());
  for ( Table table : fetched) {
    assertTrue(tableNames.contains(table.getTableName().toLowerCase()));
  }
  client.dropDatabase(dbName,true,true,true);
}
Class: org.apache.hadoop.hive.metastore.TestHiveMetaStorePartitionSpecs IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Test to confirm that partitions can be added using PartitionSpecs.
 */
@Test public void testAddPartitions(){
  try {
    HiveMetaStoreClient hmsc=new HiveMetaStoreClient(hiveConf);
    clearAndRecreateDB(hmsc);
    createTable(hmsc,true);
    Table table=hmsc.getTable(dbName,tableName);
    populatePartitions(hmsc,table,Arrays.asList("isLocatedInTablePath","isLocatedOutsideTablePath"));
    // Clone the table definition, relocating its SD so added partitions land under the new root.
    String targetTableName="cloned_" + tableName;
    Table targetTable=new Table(table);
    targetTable.setTableName(targetTableName);
    StorageDescriptor targetTableSd=new StorageDescriptor(targetTable.getSd());
    targetTableSd.setLocation(targetTableSd.getLocation().replace(tableName,targetTableName));
    hmsc.createTable(targetTable);
    // Re-point the filtered source partitions at the clone and add them in one PartitionSpec call.
    PartitionSpecProxy partitionsForAddition=hmsc.listPartitionSpecsByFilter(dbName,tableName,"blurb = \"isLocatedInTablePath\"",-1);
    partitionsForAddition.setTableName(targetTableName);
    partitionsForAddition.setRootLocation(targetTableSd.getLocation());
    Assert.assertEquals("Unexpected number of partitions added. ",partitionsForAddition.size(),hmsc.add_partitions_pspec(partitionsForAddition));
    // Read the clone's partitions back and compare them pairwise against the source.
    PartitionSpecProxy clonedPartitions=hmsc.listPartitionSpecs(dbName,targetTableName,-1);
    Assert.assertEquals("Unexpected number of partitions returned. ",partitionsForAddition.size(),clonedPartitions.size());
    PartitionSpecProxy.PartitionIterator sourceIterator=partitionsForAddition.getPartitionIterator(), targetIterator=clonedPartitions.getPartitionIterator();
    while (targetIterator.hasNext()) {
      Partition sourcePartition=sourceIterator.next(), targetPartition=targetIterator.next();
      Assert.assertEquals("Mismatched values.",sourcePartition.getValues(),targetPartition.getValues());
      Assert.assertEquals("Mismatched locations.",sourcePartition.getSd().getLocation(),targetPartition.getSd().getLocation());
    }
  }
  catch ( Throwable t) {
    // Fail explicitly instead of assertTrue(msg, false); LOG.error already
    // records the stack trace, so the redundant printStackTrace was dropped.
    LOG.error("Unexpected Exception!",t);
    Assert.fail("Unexpected Exception!");
  }
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
* Test to confirm that Partition-grouping behaves correctly when Table-schemas evolve.
* Partitions must be grouped by location and schema.
*/
@Test public void testFetchingPartitionsWithDifferentSchemas(){
try {
HiveMetaStoreClient hmsc=new HiveMetaStoreClient(hiveConf);
clearAndRecreateDB(hmsc);
createTable(hmsc,true);
Table table=hmsc.getTable(dbName,tableName);
populatePartitions(hmsc,table,Arrays.asList("isLocatedInTablePath","isLocatedOutsideTablePath"));
List fields=table.getSd().getCols();
fields.add(new FieldSchema("goo","string","Entirely new column. Doesn't apply to older partitions."));
table.getSd().setCols(fields);
hmsc.alter_table(dbName,tableName,table);
table=hmsc.getTable(dbName,tableName);
Assert.assertEquals("Unexpected number of table columns.",3,table.getSd().getColsSize());
populatePartitions(hmsc,table,Arrays.asList("hasNewColumn"));
PartitionSpecProxy partitionSpecProxy=hmsc.listPartitionSpecs(dbName,tableName,-1);
Assert.assertEquals("Unexpected number of partitions.",nDates * 3,partitionSpecProxy.size());
Assert.assertTrue("Unexpected type of PartitionSpecProxy.",partitionSpecProxy instanceof CompositePartitionSpecProxy);
CompositePartitionSpecProxy compositePartitionSpecProxy=(CompositePartitionSpecProxy)partitionSpecProxy;
List partitionSpecs=compositePartitionSpecProxy.toPartitionSpec();
Assert.assertTrue("PartitionSpec[0] should have been a SharedSDPartitionSpec.",partitionSpecs.get(0).isSetSharedSDPartitionSpec());
Assert.assertEquals("PartitionSpec[0] should use the table-path as the common root location. ",table.getSd().getLocation(),partitionSpecs.get(0).getRootPath());
Assert.assertTrue("PartitionSpec[1] should have been a SharedSDPartitionSpec.",partitionSpecs.get(1).isSetSharedSDPartitionSpec());
Assert.assertEquals("PartitionSpec[1] should use the table-path as the common root location. ",table.getSd().getLocation(),partitionSpecs.get(1).getRootPath());
Assert.assertTrue("PartitionSpec[2] should have been a ListComposingPartitionSpec.",partitionSpecs.get(2).isSetPartitionList());
PartitionSpecProxy.PartitionIterator iterator=partitionSpecProxy.getPartitionIterator();
Map> blurbToPartitionList=new HashMap>(3);
while (iterator.hasNext()) {
Partition partition=iterator.next();
String blurb=partition.getValues().get(1);
if (!blurbToPartitionList.containsKey(blurb)) {
blurbToPartitionList.put(blurb,new ArrayList(nDates));
}
blurbToPartitionList.get(blurb).add(partition);
}
for ( Partition partition : blurbToPartitionList.get("isLocatedOutsideTablePath")) {
Assert.assertEquals("Unexpected number of columns.",2,partition.getSd().getCols().size());
Assert.assertEquals("Unexpected first column.","foo",partition.getSd().getCols().get(0).getName());
Assert.assertEquals("Unexpected second column.","bar",partition.getSd().getCols().get(1).getName());
String partitionLocation=partition.getSd().getLocation();
String tableLocation=table.getSd().getLocation();
Assert.assertTrue("Unexpected partition location: " + partitionLocation + ". "+ "Partition should have been outside table location: "+ tableLocation,!partitionLocation.startsWith(tableLocation));
}
for ( Partition partition : blurbToPartitionList.get("isLocatedInTablePath")) {
Assert.assertEquals("Unexpected number of columns.",2,partition.getSd().getCols().size());
Assert.assertEquals("Unexpected first column.","foo",partition.getSd().getCols().get(0).getName());
Assert.assertEquals("Unexpected second column.","bar",partition.getSd().getCols().get(1).getName());
String partitionLocation=partition.getSd().getLocation();
String tableLocation=table.getSd().getLocation();
Assert.assertTrue("Unexpected partition location: " + partitionLocation + ". "+ "Partition should have been within table location: "+ tableLocation,partitionLocation.startsWith(tableLocation));
}
for ( Partition partition : blurbToPartitionList.get("hasNewColumn")) {
Assert.assertEquals("Unexpected number of columns.",3,partition.getSd().getCols().size());
Assert.assertEquals("Unexpected first column.","foo",partition.getSd().getCols().get(0).getName());
Assert.assertEquals("Unexpected second column.","bar",partition.getSd().getCols().get(1).getName());
Assert.assertEquals("Unexpected third column.","goo",partition.getSd().getCols().get(2).getName());
String partitionLocation=partition.getSd().getLocation();
String tableLocation=table.getSd().getLocation();
Assert.assertTrue("Unexpected partition location: " + partitionLocation + ". "+ "Partition should have been within table location: "+ tableLocation,partitionLocation.startsWith(tableLocation));
}
}
catch ( Throwable t) {
LOG.error("Unexpected Exception!",t);
t.printStackTrace();
Assert.assertTrue("Unexpected Exception!",false);
}
}
Class: org.apache.hadoop.hive.metastore.TestHiveMetaStoreTxns BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/** Opens txns 1..3, aborts 1 and commits 2, then checks the snapshot's validity answers. */
@Test public void testTxns() throws Exception {
  List<Long> txnIds=client.openTxns("me",3).getTxn_ids();
  Assert.assertEquals(1L,(long)txnIds.get(0));
  Assert.assertEquals(2L,(long)txnIds.get(1));
  Assert.assertEquals(3L,(long)txnIds.get(2));
  client.rollbackTxn(1);
  client.commitTxn(2);
  ValidTxnList validTxns=client.getValidTxns();
  Assert.assertFalse(validTxns.isTxnValid(1)); // aborted
  Assert.assertTrue(validTxns.isTxnValid(2));  // committed
  Assert.assertFalse(validTxns.isTxnValid(3)); // still open
  Assert.assertFalse(validTxns.isTxnValid(4)); // never allocated
}
InternalCallVerifier EqualityVerifier
/**
 * Exercises ValidTxnList.isTxnRangeValid() against a live snapshot with a mix
 * of committed/aborted/open transactions, then against a hand-built
 * ValidReadTxnList ("10:4:5:6" = high-watermark 10 with invalid txns 4, 5, 6).
 */
@Test public void testTxnRange() throws Exception {
  ValidTxnList validTxns=client.getValidTxns();
  Assert.assertEquals(ValidTxnList.RangeResponse.NONE,validTxns.isTxnRangeValid(1L,3L));
  // Open 5 transactions (ids 1..5). The returned id list was previously bound
  // to an unused local ("tids"); only the side effect matters here.
  client.openTxns("me",5);
  HeartbeatTxnRangeResponse rsp=client.heartbeatTxnRange(1,5);
  Assert.assertEquals(0,rsp.getNosuch().size());
  Assert.assertEquals(0,rsp.getAborted().size());
  // 1 aborted, 2-4 committed, 5 left open.
  client.rollbackTxn(1L);
  client.commitTxn(2L);
  client.commitTxn(3L);
  client.commitTxn(4L);
  validTxns=client.getValidTxns();
  // (stray System.out.println debug output removed)
  Assert.assertEquals(ValidTxnList.RangeResponse.ALL,validTxns.isTxnRangeValid(2L,2L));
  Assert.assertEquals(ValidTxnList.RangeResponse.ALL,validTxns.isTxnRangeValid(2L,3L));
  Assert.assertEquals(ValidTxnList.RangeResponse.ALL,validTxns.isTxnRangeValid(2L,4L));
  Assert.assertEquals(ValidTxnList.RangeResponse.ALL,validTxns.isTxnRangeValid(3L,4L));
  Assert.assertEquals(ValidTxnList.RangeResponse.SOME,validTxns.isTxnRangeValid(1L,4L));
  Assert.assertEquals(ValidTxnList.RangeResponse.SOME,validTxns.isTxnRangeValid(2L,5L));
  Assert.assertEquals(ValidTxnList.RangeResponse.SOME,validTxns.isTxnRangeValid(1L,2L));
  Assert.assertEquals(ValidTxnList.RangeResponse.SOME,validTxns.isTxnRangeValid(4L,5L));
  Assert.assertEquals(ValidTxnList.RangeResponse.NONE,validTxns.isTxnRangeValid(1L,1L));
  Assert.assertEquals(ValidTxnList.RangeResponse.NONE,validTxns.isTxnRangeValid(5L,10L));
  // Hand-built snapshot: HWM 10, invalid txns {4, 5, 6}.
  validTxns=new ValidReadTxnList("10:4:5:6");
  Assert.assertEquals(ValidTxnList.RangeResponse.NONE,validTxns.isTxnRangeValid(4,6));
  Assert.assertEquals(ValidTxnList.RangeResponse.ALL,validTxns.isTxnRangeValid(7,10));
  Assert.assertEquals(ValidTxnList.RangeResponse.SOME,validTxns.isTxnRangeValid(7,11));
  Assert.assertEquals(ValidTxnList.RangeResponse.SOME,validTxns.isTxnRangeValid(3,6));
  Assert.assertEquals(ValidTxnList.RangeResponse.SOME,validTxns.isTxnRangeValid(4,7));
  Assert.assertEquals(ValidTxnList.RangeResponse.SOME,validTxns.isTxnRangeValid(1,12));
  Assert.assertEquals(ValidTxnList.RangeResponse.ALL,validTxns.isTxnRangeValid(1,3));
}
InternalCallVerifier EqualityVerifier
/** Locks requested inside an open transaction are granted, check out as ACQUIRED, and heartbeat succeeds. */
@Test public void testLocksWithTxn() throws Exception {
  long txnid=client.openTxn("me");
  // Build the request one component at a time instead of one long chain.
  LockRequestBuilder rqstBuilder=new LockRequestBuilder();
  rqstBuilder.setTransactionId(txnid);
  rqstBuilder.addLockComponent(new LockComponentBuilder().setDbName("mydb").setTableName("mytable").setPartitionName("mypartition").setExclusive().build());
  rqstBuilder.addLockComponent(new LockComponentBuilder().setDbName("mydb").setTableName("yourtable").setSemiShared().build());
  rqstBuilder.addLockComponent(new LockComponentBuilder().setDbName("yourdb").setShared().build());
  rqstBuilder.setUser("fred");
  LockResponse res=client.lock(rqstBuilder.build());
  Assert.assertEquals(1L,res.getLockid());
  Assert.assertEquals(LockState.ACQUIRED,res.getState());
  res=client.checkLock(1);
  Assert.assertEquals(1L,res.getLockid());
  Assert.assertEquals(LockState.ACQUIRED,res.getState());
  client.heartbeat(txnid,1);
  client.commitTxn(txnid);
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/** getValidTxns(currentTxn) must treat the caller's own open transaction (3) as valid. */
@Test public void testOpenTxnNotExcluded() throws Exception {
  List<Long> txnIds=client.openTxns("me",3).getTxn_ids();
  Assert.assertEquals(1L,(long)txnIds.get(0));
  Assert.assertEquals(2L,(long)txnIds.get(1));
  Assert.assertEquals(3L,(long)txnIds.get(2));
  client.rollbackTxn(1);
  client.commitTxn(2);
  // Snapshot taken from txn 3's point of view.
  ValidTxnList validTxns=client.getValidTxns(3);
  Assert.assertFalse(validTxns.isTxnValid(1)); // aborted
  Assert.assertTrue(validTxns.isTxnValid(2));  // committed
  Assert.assertTrue(validTxns.isTxnValid(3));  // our own open txn is not excluded
  Assert.assertFalse(validTxns.isTxnValid(4)); // never allocated
}
InternalCallVerifier EqualityVerifier
/** Transaction-less locks are granted, check out as ACQUIRED, and can be heartbeated and released. */
@Test public void testLocks() throws Exception {
  LockRequestBuilder builder=new LockRequestBuilder()
      .addLockComponent(new LockComponentBuilder().setDbName("mydb").setTableName("mytable").setPartitionName("mypartition").setExclusive().build())
      .addLockComponent(new LockComponentBuilder().setDbName("mydb").setTableName("yourtable").setSemiShared().build())
      .addLockComponent(new LockComponentBuilder().setDbName("yourdb").setShared().build());
  builder.setUser("fred");
  LockResponse response=client.lock(builder.build());
  Assert.assertEquals(1L,response.getLockid());
  Assert.assertEquals(LockState.ACQUIRED,response.getState());
  response=client.checkLock(1);
  Assert.assertEquals(1L,response.getLockid());
  Assert.assertEquals(LockState.ACQUIRED,response.getState());
  // txn id 0: the lock is not associated with a transaction.
  client.heartbeat(0,1);
  client.unlock(1);
}
BranchVerifier UtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Round-trips ValidReadTxnList through its String form: "hwm:" with no invalid
 * txns, and "hwm:a:b" where the invalid-txn order is unspecified.
 */
@Test public void stringifyValidTxns() throws Exception {
  // High watermark 1, nothing invalid: serializes as "1:".
  ValidTxnList txns=new ValidReadTxnList("1:");
  String serialized=txns.toString();
  Assert.assertEquals("1:",serialized);
  // First re-parse: same watermark, still no invalid txns.
  txns=new ValidReadTxnList(serialized);
  Assert.assertEquals(1,txns.getHighWatermark());
  Assert.assertNotNull(txns.getInvalidTransactions());
  Assert.assertEquals(0,txns.getInvalidTransactions().length);
  serialized=txns.toString();
  Assert.assertEquals("1:",serialized);
  // Second re-parse to confirm the round-trip is stable.
  txns=new ValidReadTxnList(serialized);
  Assert.assertEquals(1,txns.getHighWatermark());
  Assert.assertNotNull(txns.getInvalidTransactions());
  Assert.assertEquals(0,txns.getInvalidTransactions().length);
  // Two invalid txns; serialization may list them in either order.
  txns=new ValidReadTxnList("10:5:3");
  serialized=txns.toString();
  if (!serialized.equals("10:3:5") && !serialized.equals("10:5:3")) {
    Assert.fail("Unexpected string value " + serialized);
  }
  txns=new ValidReadTxnList(serialized);
  Assert.assertEquals(10,txns.getHighWatermark());
  Assert.assertNotNull(txns.getInvalidTransactions());
  Assert.assertEquals(2,txns.getInvalidTransactions().length);
  // The invalid set must be exactly {3, 5}.
  boolean sawThree=false, sawFive=false;
  for ( long txn : txns.getInvalidTransactions()) {
    if (txn == 3) sawThree=true;
    else if (txn == 5) sawFive=true;
    else Assert.fail("Unexpected value " + txn);
  }
  Assert.assertTrue(sawThree);
  Assert.assertTrue(sawFive);
}
Class: org.apache.hadoop.hive.metastore.TestLockRequestBuilder InternalCallVerifier EqualityVerifier
/** SHARED_READ then EXCLUSIVE on the same DB coalesce to a single EXCLUSIVE lock. */
@Test public void testSRExDb(){
  LockRequestBuilder builder=new LockRequestBuilder();
  builder.addLockComponent(new LockComponent(LockType.SHARED_READ,LockLevel.DB,"mydb"));
  builder.addLockComponent(new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb"));
  builder.setUser("fred");
  List<LockComponent> locks=builder.build().getComponent();
  Assert.assertEquals(1,locks.size());
  Assert.assertEquals(LockType.EXCLUSIVE,locks.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/** SHARED_WRITE then EXCLUSIVE on the same DB coalesce to a single EXCLUSIVE lock. */
@Test public void testSWExDb(){
  LockRequestBuilder builder=new LockRequestBuilder();
  builder.addLockComponent(new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb"));
  builder.addLockComponent(new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb"));
  builder.setUser("fred");
  List<LockComponent> locks=builder.build().getComponent();
  Assert.assertEquals(1,locks.size());
  Assert.assertEquals(LockType.EXCLUSIVE,locks.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/** EXCLUSIVE then SHARED_WRITE on the same table coalesce to a single EXCLUSIVE lock. */
@Test public void testExSWTable(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent exclusive=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  exclusive.setTablename("mytable");
  LockComponent sharedWrite=new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb");
  sharedWrite.setTablename("mytable");
  builder.addLockComponent(exclusive);
  builder.addLockComponent(sharedWrite);
  builder.setUser("fred");
  List<LockComponent> locks=builder.build().getComponent();
  Assert.assertEquals(1,locks.size());
  Assert.assertEquals(LockType.EXCLUSIVE,locks.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/** Two SHARED_READ requests on the same table coalesce to a single SHARED_READ lock. */
@Test public void testSRSRTable(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent first=new LockComponent(LockType.SHARED_READ,LockLevel.DB,"mydb");
  first.setTablename("mytable");
  LockComponent second=new LockComponent(LockType.SHARED_READ,LockLevel.DB,"mydb");
  second.setTablename("mytable");
  builder.addLockComponent(first);
  builder.addLockComponent(second);
  builder.setUser("fred");
  List<LockComponent> locks=builder.build().getComponent();
  Assert.assertEquals(1,locks.size());
  Assert.assertEquals(LockType.SHARED_READ,locks.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/** SHARED_READ then SHARED_WRITE on the same DB coalesce to a single SHARED_WRITE lock. */
@Test public void testSRSWDb(){
  LockRequestBuilder builder=new LockRequestBuilder();
  builder.addLockComponent(new LockComponent(LockType.SHARED_READ,LockLevel.DB,"mydb"));
  builder.addLockComponent(new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb"));
  builder.setUser("fred");
  List<LockComponent> locks=builder.build().getComponent();
  Assert.assertEquals(1,locks.size());
  Assert.assertEquals(LockType.SHARED_WRITE,locks.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/** Locks on two distinct partitions of the same table are kept as two components. */
@Test public void testTwoSeparatePartitions(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent myPart=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  myPart.setTablename("mytable");
  myPart.setPartitionname("mypart");
  LockComponent yourPart=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  yourPart.setTablename("mytable");
  yourPart.setPartitionname("yourpart");
  builder.addLockComponent(myPart);
  builder.addLockComponent(yourPart);
  builder.setUser("fred");
  List<LockComponent> locks=builder.build().getComponent();
  Assert.assertEquals(2,locks.size());
}
InternalCallVerifier EqualityVerifier
/** Locks on two distinct databases are kept as two components. */
@Test public void testTwoSeparateDbs(){
  LockRequestBuilder builder=new LockRequestBuilder();
  builder.addLockComponent(new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb"));
  builder.setUser("fred");
  builder.addLockComponent(new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"yourdb"));
  List<LockComponent> locks=builder.build().getComponent();
  Assert.assertEquals(2,locks.size());
}
InternalCallVerifier EqualityVerifier
/** SHARED_READ then SHARED_WRITE on the same partition coalesce to a single SHARED_WRITE lock. */
@Test public void testSRSWPart(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent sharedRead=new LockComponent(LockType.SHARED_READ,LockLevel.DB,"mydb");
  sharedRead.setTablename("mytable");
  sharedRead.setPartitionname("mypart");
  LockComponent sharedWrite=new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb");
  sharedWrite.setTablename("mytable");
  sharedWrite.setPartitionname("mypart");
  builder.addLockComponent(sharedRead);
  builder.addLockComponent(sharedWrite);
  builder.setUser("fred");
  List<LockComponent> locks=builder.build().getComponent();
  Assert.assertEquals(1,locks.size());
  Assert.assertEquals(LockType.SHARED_WRITE,locks.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/** Two SHARED_WRITE requests on the same DB coalesce to a single SHARED_WRITE lock. */
@Test public void testSWSWDb(){
  LockRequestBuilder builder=new LockRequestBuilder();
  builder.addLockComponent(new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb"));
  builder.addLockComponent(new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb"));
  builder.setUser("fred");
  List<LockComponent> locks=builder.build().getComponent();
  Assert.assertEquals(1,locks.size());
  Assert.assertEquals(LockType.SHARED_WRITE,locks.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/** SHARED_WRITE then SHARED_READ on the same DB coalesce to a single SHARED_WRITE lock. */
@Test public void testSWSRDb(){
  LockRequestBuilder builder=new LockRequestBuilder();
  builder.addLockComponent(new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb"));
  builder.addLockComponent(new LockComponent(LockType.SHARED_READ,LockLevel.DB,"mydb"));
  builder.setUser("fred");
  List<LockComponent> locks=builder.build().getComponent();
  Assert.assertEquals(1,locks.size());
  Assert.assertEquals(LockType.SHARED_WRITE,locks.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/** Two EXCLUSIVE requests on the same partition coalesce to a single lock. */
@Test public void testExExPart(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent first=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  first.setTablename("mytable");
  first.setPartitionname("mypart");
  LockComponent second=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  second.setTablename("mytable");
  second.setPartitionname("mypart");
  builder.addLockComponent(first);
  builder.addLockComponent(second);
  builder.setUser("fred");
  List<LockComponent> locks=builder.build().getComponent();
  Assert.assertEquals(1,locks.size());
}
InternalCallVerifier EqualityVerifier
/** SHARED_WRITE then EXCLUSIVE on the same partition coalesce to a single EXCLUSIVE lock. */
@Test public void testSWExPart(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent sharedWrite=new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb");
  sharedWrite.setTablename("mytable");
  sharedWrite.setPartitionname("mypart");
  LockComponent exclusive=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  exclusive.setTablename("mytable");
  exclusive.setPartitionname("mypart");
  builder.addLockComponent(sharedWrite);
  builder.addLockComponent(exclusive);
  builder.setUser("fred");
  List<LockComponent> locks=builder.build().getComponent();
  Assert.assertEquals(1,locks.size());
  Assert.assertEquals(LockType.EXCLUSIVE,locks.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/** EXCLUSIVE then SHARED_READ on the same partition coalesce to a single EXCLUSIVE lock. */
@Test public void testExSRPart(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent exclusive=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  exclusive.setTablename("mytable");
  exclusive.setPartitionname("mypart");
  LockComponent sharedRead=new LockComponent(LockType.SHARED_READ,LockLevel.DB,"mydb");
  sharedRead.setTablename("mytable");
  sharedRead.setPartitionname("mypart");
  builder.addLockComponent(exclusive);
  builder.addLockComponent(sharedRead);
  builder.setUser("fred");
  List<LockComponent> locks=builder.build().getComponent();
  Assert.assertEquals(1,locks.size());
  Assert.assertEquals(LockType.EXCLUSIVE,locks.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/** SHARED_WRITE then SHARED_READ on the same partition coalesce to a single SHARED_WRITE lock. */
@Test public void testSWSRPart(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent sharedWrite=new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb");
  sharedWrite.setTablename("mytable");
  sharedWrite.setPartitionname("mypart");
  LockComponent sharedRead=new LockComponent(LockType.SHARED_READ,LockLevel.DB,"mydb");
  sharedRead.setTablename("mytable");
  sharedRead.setPartitionname("mypart");
  builder.addLockComponent(sharedWrite);
  builder.addLockComponent(sharedRead);
  builder.setUser("fred");
  List<LockComponent> locks=builder.build().getComponent();
  Assert.assertEquals(1,locks.size());
  Assert.assertEquals(LockType.SHARED_WRITE,locks.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/** Two EXCLUSIVE requests on the same table coalesce to a single lock. */
@Test public void testExExTable(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent first=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  first.setTablename("mytable");
  LockComponent second=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  second.setTablename("mytable");
  builder.addLockComponent(first);
  builder.addLockComponent(second);
  builder.setUser("fred");
  List<LockComponent> locks=builder.build().getComponent();
  Assert.assertEquals(1,locks.size());
}
InternalCallVerifier EqualityVerifier
/** Two SHARED_WRITE requests on the same partition coalesce to a single SHARED_WRITE lock. */
@Test public void testSWSWPart(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent first=new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb");
  first.setTablename("mytable");
  first.setPartitionname("mypart");
  LockComponent second=new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb");
  second.setTablename("mytable");
  second.setPartitionname("mypart");
  builder.addLockComponent(first);
  builder.addLockComponent(second);
  builder.setUser("fred");
  List<LockComponent> locks=builder.build().getComponent();
  Assert.assertEquals(1,locks.size());
  Assert.assertEquals(LockType.SHARED_WRITE,locks.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/** Locks on two distinct tables in the same DB are kept as two components. */
@Test public void testTwoSeparateTables(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent myTable=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  myTable.setTablename("mytable");
  LockComponent yourTable=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  yourTable.setTablename("yourtable");
  builder.addLockComponent(myTable);
  builder.addLockComponent(yourTable);
  builder.setUser("fred");
  List<LockComponent> locks=builder.build().getComponent();
  Assert.assertEquals(2,locks.size());
}
InternalCallVerifier EqualityVerifier
/** SHARED_WRITE then SHARED_READ on the same table coalesce to a single SHARED_WRITE lock. */
@Test public void testSWSRTable(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent sharedWrite=new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb");
  sharedWrite.setTablename("mytable");
  LockComponent sharedRead=new LockComponent(LockType.SHARED_READ,LockLevel.DB,"mydb");
  sharedRead.setTablename("mytable");
  builder.addLockComponent(sharedWrite);
  builder.addLockComponent(sharedRead);
  builder.setUser("fred");
  List<LockComponent> locks=builder.build().getComponent();
  Assert.assertEquals(1,locks.size());
  Assert.assertEquals(LockType.SHARED_WRITE,locks.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/** Two EXCLUSIVE database-level locks on the same database coalesce to one component. */
@Test public void testExExDb(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent first=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  LockComponent second=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  builder.addLockComponent(first);
  builder.addLockComponent(second).setUser("fred");
  List components=builder.build().getComponent();
  Assert.assertEquals(1,components.size());
}
InternalCallVerifier EqualityVerifier
/** EXCLUSIVE then SHARED_READ on the same database coalesce, keeping the stronger EXCLUSIVE. */
@Test public void testExSRDb(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent exclusive=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  builder.addLockComponent(exclusive).setUser("fred");
  LockComponent sharedRead=new LockComponent(LockType.SHARED_READ,LockLevel.DB,"mydb");
  builder.addLockComponent(sharedRead);
  List components=builder.build().getComponent();
  Assert.assertEquals(1,components.size());
  Assert.assertEquals(LockType.EXCLUSIVE,components.get(0).getType());
}
BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Building a lock request without ever supplying a user must fail with a
 * RuntimeException carrying a fixed error message.
 */
@Test public void noUser(){
  LockRequestBuilder bldr=new LockRequestBuilder();
  LockComponent comp=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  bldr.addLockComponent(comp);
  comp=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  comp.setTablename("mytable");
  bldr.addLockComponent(comp);
  boolean caughtException=false;
  try {
    // Result intentionally discarded (was an unused local): build() must throw
    // before producing a request because no user was ever set.
    bldr.build();
  }
  catch ( RuntimeException e) {
    Assert.assertEquals("Cannot build a lock without giving a user",e.getMessage());
    caughtException=true;
  }
  Assert.assertTrue("expected build() to reject a request with no user",caughtException);
}
InternalCallVerifier EqualityVerifier
/** EXCLUSIVE then SHARED_READ on the same table coalesce, keeping EXCLUSIVE. */
@Test public void testExSRTable(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent exclusive=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  exclusive.setTablename("mytable");
  LockComponent sharedRead=new LockComponent(LockType.SHARED_READ,LockLevel.DB,"mydb");
  sharedRead.setTablename("mytable");
  builder.addLockComponent(exclusive);
  builder.addLockComponent(sharedRead).setUser("fred");
  List components=builder.build().getComponent();
  Assert.assertEquals(1,components.size());
  Assert.assertEquals(LockType.EXCLUSIVE,components.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/** SHARED_READ then EXCLUSIVE on the same table coalesce, upgraded to EXCLUSIVE. */
@Test public void testSRExTable(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent sharedRead=new LockComponent(LockType.SHARED_READ,LockLevel.DB,"mydb");
  sharedRead.setTablename("mytable");
  LockComponent exclusive=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  exclusive.setTablename("mytable");
  builder.addLockComponent(sharedRead);
  builder.addLockComponent(exclusive).setUser("fred");
  List components=builder.build().getComponent();
  Assert.assertEquals(1,components.size());
  Assert.assertEquals(LockType.EXCLUSIVE,components.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/** EXCLUSIVE then SHARED_WRITE on the same database coalesce, keeping EXCLUSIVE. */
@Test public void testExSWDb(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent exclusive=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  builder.addLockComponent(exclusive).setUser("fred");
  LockComponent sharedWrite=new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb");
  builder.addLockComponent(sharedWrite);
  List components=builder.build().getComponent();
  Assert.assertEquals(1,components.size());
  Assert.assertEquals(LockType.EXCLUSIVE,components.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/** Two SHARED_READ database-level locks coalesce into a single SHARED_READ component. */
@Test public void testSRSRDb(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent first=new LockComponent(LockType.SHARED_READ,LockLevel.DB,"mydb");
  LockComponent second=new LockComponent(LockType.SHARED_READ,LockLevel.DB,"mydb");
  builder.addLockComponent(first);
  builder.addLockComponent(second).setUser("fred");
  List components=builder.build().getComponent();
  Assert.assertEquals(1,components.size());
  Assert.assertEquals(LockType.SHARED_READ,components.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/**
 * A database-level and a table-level EXCLUSIVE lock are distinct components;
 * the built request also carries the user and the local hostname.
 */
@Test public void testDbTable() throws Exception {
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent dbLock=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  builder.addLockComponent(dbLock).setUser("fred");
  LockComponent tableLock=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  tableLock.setTablename("mytable");
  builder.addLockComponent(tableLock);
  LockRequest request=builder.build();
  Assert.assertEquals(2,request.getComponent().size());
  Assert.assertEquals("fred",request.getUser());
  // The builder stamps the local machine's hostname onto the request.
  Assert.assertEquals(InetAddress.getLocalHost().getHostName(),request.getHostname());
}
InternalCallVerifier EqualityVerifier
/** SHARED_READ then EXCLUSIVE on the same partition coalesce, upgraded to EXCLUSIVE. */
@Test public void testSRExPart(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent sharedRead=new LockComponent(LockType.SHARED_READ,LockLevel.DB,"mydb");
  sharedRead.setTablename("mytable");
  sharedRead.setPartitionname("mypart");
  LockComponent exclusive=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  exclusive.setTablename("mytable");
  exclusive.setPartitionname("mypart");
  builder.addLockComponent(sharedRead);
  builder.addLockComponent(exclusive).setUser("fred");
  List components=builder.build().getComponent();
  Assert.assertEquals(1,components.size());
  Assert.assertEquals(LockType.EXCLUSIVE,components.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/** SHARED_READ then SHARED_WRITE on the same table coalesce, upgraded to SHARED_WRITE. */
@Test public void testSRSWTable(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent sharedRead=new LockComponent(LockType.SHARED_READ,LockLevel.DB,"mydb");
  sharedRead.setTablename("mytable");
  LockComponent sharedWrite=new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb");
  sharedWrite.setTablename("mytable");
  builder.addLockComponent(sharedRead);
  builder.addLockComponent(sharedWrite).setUser("fred");
  List components=builder.build().getComponent();
  Assert.assertEquals(1,components.size());
  Assert.assertEquals(LockType.SHARED_WRITE,components.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/**
 * Table-level and partition-level EXCLUSIVE locks stay as two separate
 * components, and a null user falls back to the default "unknown".
 */
@Test public void testTablePartition(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent tableLock=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  tableLock.setTablename("mytable");
  builder.addLockComponent(tableLock).setUser(null);
  LockComponent partitionLock=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  partitionLock.setTablename("mytable");
  partitionLock.setPartitionname("mypartition");
  builder.addLockComponent(partitionLock);
  LockRequest request=builder.build();
  Assert.assertEquals(2,request.getComponent().size());
  Assert.assertEquals("unknown",request.getUser());
}
InternalCallVerifier EqualityVerifier
/** Two SHARED_WRITE locks on the same table coalesce into one SHARED_WRITE component. */
@Test public void testSWSWTable(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent first=new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb");
  first.setTablename("mytable");
  LockComponent second=new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb");
  second.setTablename("mytable");
  builder.addLockComponent(first);
  builder.addLockComponent(second).setUser("fred");
  List components=builder.build().getComponent();
  Assert.assertEquals(1,components.size());
  Assert.assertEquals(LockType.SHARED_WRITE,components.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/** SHARED_WRITE then EXCLUSIVE on the same table coalesce, upgraded to EXCLUSIVE. */
@Test public void testSWExTable(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent sharedWrite=new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb");
  sharedWrite.setTablename("mytable");
  LockComponent exclusive=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  exclusive.setTablename("mytable");
  builder.addLockComponent(sharedWrite);
  builder.addLockComponent(exclusive).setUser("fred");
  List components=builder.build().getComponent();
  Assert.assertEquals(1,components.size());
  Assert.assertEquals(LockType.EXCLUSIVE,components.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/** EXCLUSIVE then SHARED_WRITE on the same partition coalesce, keeping EXCLUSIVE. */
@Test public void testExSWPart(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent exclusive=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
  exclusive.setTablename("mytable");
  exclusive.setPartitionname("mypart");
  LockComponent sharedWrite=new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb");
  sharedWrite.setTablename("mytable");
  sharedWrite.setPartitionname("mypart");
  builder.addLockComponent(exclusive);
  builder.addLockComponent(sharedWrite).setUser("fred");
  List components=builder.build().getComponent();
  Assert.assertEquals(1,components.size());
  Assert.assertEquals(LockType.EXCLUSIVE,components.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/** Two SHARED_READ locks on the same partition coalesce into one SHARED_READ component. */
@Test public void testSRSRPart(){
  LockRequestBuilder builder=new LockRequestBuilder();
  LockComponent first=new LockComponent(LockType.SHARED_READ,LockLevel.DB,"mydb");
  first.setTablename("mytable");
  first.setPartitionname("mypart");
  LockComponent second=new LockComponent(LockType.SHARED_READ,LockLevel.DB,"mydb");
  second.setTablename("mytable");
  second.setPartitionname("mypart");
  builder.addLockComponent(first);
  builder.addLockComponent(second).setUser("fred");
  List components=builder.build().getComponent();
  Assert.assertEquals(1,components.size());
  Assert.assertEquals(LockType.SHARED_READ,components.get(0).getType());
}
Class: org.apache.hadoop.hive.metastore.TestObjectStore InternalCallVerifier EqualityVerifier
/**
 * Tests partition operations
 *
 * End-to-end partition CRUD against the ObjectStore: creates a database and a
 * two-level ("Country"/"State") partitioned table, adds two partitions, checks
 * retrieval order, filter counts and drop behaviour, then tears everything down.
 * The operations are order-dependent because they mutate shared store state.
 */
@Test public void testPartitionOps() throws MetaException, InvalidObjectException, NoSuchObjectException, InvalidInputException {
Database db1=new Database(DB1,"description","locationurl",null);
objectStore.createDatabase(db1);
// Minimal storage descriptor reused by the table and both partitions.
StorageDescriptor sd=new StorageDescriptor(null,"location",null,null,false,0,new SerDeInfo("SerDeName","serializationLib",null),null,null,null);
// NOTE(review): raw HashMap/List types throughout this method — predates generics cleanup.
HashMap tableParams=new HashMap();
tableParams.put("EXTERNAL","false");
FieldSchema partitionKey1=new FieldSchema("Country",serdeConstants.STRING_TYPE_NAME,"");
FieldSchema partitionKey2=new FieldSchema("State",serdeConstants.STRING_TYPE_NAME,"");
Table tbl1=new Table(TABLE1,DB1,"owner",1,2,3,sd,Arrays.asList(partitionKey1,partitionKey2),tableParams,"viewOriginalText","viewExpandedText","MANAGED_TABLE");
objectStore.createTable(tbl1);
HashMap partitionParams=new HashMap();
partitionParams.put("PARTITION_LEVEL_PRIVILEGE","true");
List value1=Arrays.asList("US","CA");
// Create/access times 111 vs 222 double as markers to tell the two partitions apart below.
Partition part1=new Partition(value1,DB1,TABLE1,111,111,sd,partitionParams);
objectStore.addPartition(part1);
List value2=Arrays.asList("US","MA");
Partition part2=new Partition(value2,DB1,TABLE1,222,222,sd,partitionParams);
objectStore.addPartition(part2);
// Deadline timer must be running before partition retrieval calls.
Deadline.startTimer("getPartition");
List partitions=objectStore.getPartitions(DB1,TABLE1,10);
Assert.assertEquals(2,partitions.size());
Assert.assertEquals(111,partitions.get(0).getCreateTime());
Assert.assertEquals(222,partitions.get(1).getCreateTime());
// An empty filter must count all partitions; a Country filter matches both.
int numPartitions=objectStore.getNumPartitionsByFilter(DB1,TABLE1,"");
Assert.assertEquals(partitions.size(),numPartitions);
numPartitions=objectStore.getNumPartitionsByFilter(DB1,TABLE1,"country = \"US\"");
Assert.assertEquals(2,numPartitions);
// Dropping US/CA leaves only the 222-timestamped US/MA partition.
objectStore.dropPartition(DB1,TABLE1,value1);
partitions=objectStore.getPartitions(DB1,TABLE1,10);
Assert.assertEquals(1,partitions.size());
Assert.assertEquals(222,partitions.get(0).getCreateTime());
// Clean up so later tests see an empty store.
objectStore.dropPartition(DB1,TABLE1,value2);
objectStore.dropTable(DB1,TABLE1);
objectStore.dropDatabase(DB1);
}
InternalCallVerifier EqualityVerifier
/**
 * Test master keys operation
 *
 * Adds two master keys, verifies listing, updates both in place, then removes
 * them one at a time, checking the surviving set after each step.
 */
@Test public void testMasterKeyOps() throws MetaException, NoSuchObjectException {
int firstId=objectStore.addMasterKey(KEY1);
int secondId=objectStore.addMasterKey(KEY2);
String[] stored=objectStore.getMasterKeys();
Assert.assertEquals(2,stored.length);
Assert.assertEquals(KEY1,stored[0]);
Assert.assertEquals(KEY2,stored[1]);
// Rotate both keys and confirm the updated values are returned.
objectStore.updateMasterKey(firstId,"new" + KEY1);
objectStore.updateMasterKey(secondId,"new" + KEY2);
stored=objectStore.getMasterKeys();
Assert.assertEquals(2,stored.length);
Assert.assertEquals("new" + KEY1,stored[0]);
Assert.assertEquals("new" + KEY2,stored[1]);
// Removing the first key leaves only the (rotated) second one.
objectStore.removeMasterKey(firstId);
stored=objectStore.getMasterKeys();
Assert.assertEquals(1,stored.length);
Assert.assertEquals("new" + KEY2,stored[0]);
objectStore.removeMasterKey(secondId);
}
InternalCallVerifier EqualityVerifier
/**
 * Test role operation
 *
 * Creates two roles, checks listing and owner lookup, then exercises
 * grant/revoke for a user principal before removing the role.
 */
@Test public void testRoleOps() throws InvalidObjectException, MetaException, NoSuchObjectException {
objectStore.addRole(ROLE1,OWNER);
objectStore.addRole(ROLE2,OWNER);
List roleNames=objectStore.listRoleNames();
Assert.assertEquals(2,roleNames.size());
Assert.assertEquals(ROLE2,roleNames.get(1));
Role firstRole=objectStore.getRole(ROLE1);
Assert.assertEquals(OWNER,firstRole.getOwnerName());
// Grant with grant-option, revoke (keeping the grant-option flag), then drop.
objectStore.grantRole(firstRole,USER1,PrincipalType.USER,OWNER,PrincipalType.ROLE,true);
objectStore.revokeRole(firstRole,USER1,PrincipalType.USER,false);
objectStore.removeRole(ROLE1);
}
InternalCallVerifier EqualityVerifier
/**
 * Test table operations
 *
 * Creates a table, renames it via alterTable, verifies pattern-based lookup of
 * the new name, then drops the table and database.
 */
@Test public void testTableOps() throws MetaException, InvalidObjectException, NoSuchObjectException, InvalidInputException {
Database database=new Database(DB1,"description","locationurl",null);
objectStore.createDatabase(database);
StorageDescriptor storageDescriptor=new StorageDescriptor(null,"location",null,null,false,0,new SerDeInfo("SerDeName","serializationLib",null),null,null,null);
HashMap tableParams=new HashMap();
tableParams.put("EXTERNAL","false");
Table originalTable=new Table(TABLE1,DB1,"owner",1,2,3,storageDescriptor,null,tableParams,"viewOriginalText","viewExpandedText","MANAGED_TABLE");
objectStore.createTable(originalTable);
List tableNames=objectStore.getAllTables(DB1);
Assert.assertEquals(1,tableNames.size());
Assert.assertEquals(TABLE1,tableNames.get(0));
// Rename the table by altering it to a copy that carries the new name.
Table renamedTable=new Table("new" + TABLE1,DB1,"owner",1,2,3,storageDescriptor,null,tableParams,"viewOriginalText","viewExpandedText","MANAGED_TABLE");
objectStore.alterTable(DB1,TABLE1,renamedTable);
// The "new*" pattern should match only the renamed table.
tableNames=objectStore.getTables(DB1,"new*");
Assert.assertEquals(1,tableNames.size());
Assert.assertEquals("new" + TABLE1,tableNames.get(0));
objectStore.dropTable(DB1,"new" + TABLE1);
tableNames=objectStore.getAllTables(DB1);
Assert.assertEquals(0,tableNames.size());
objectStore.dropDatabase(DB1);
}
InternalCallVerifier EqualityVerifier
/**
 * Test database operations
 *
 * Creates two databases, verifies they are both listed, then drops them one at
 * a time and re-checks the listing after each drop.
 */
@Test public void testDatabaseOps() throws MetaException, InvalidObjectException, NoSuchObjectException {
Database firstDb=new Database(DB1,"description","locationurl",null);
Database secondDb=new Database(DB2,"description","locationurl",null);
objectStore.createDatabase(firstDb);
objectStore.createDatabase(secondDb);
List dbNames=objectStore.getAllDatabases();
Assert.assertEquals(2,dbNames.size());
Assert.assertEquals(DB1,dbNames.get(0));
Assert.assertEquals(DB2,dbNames.get(1));
// Dropping the first database leaves only the second in the listing.
objectStore.dropDatabase(DB1);
dbNames=objectStore.getAllDatabases();
Assert.assertEquals(1,dbNames.size());
Assert.assertEquals(DB2,dbNames.get(0));
objectStore.dropDatabase(DB2);
}
Class: org.apache.hadoop.hive.metastore.TestServerSpecificConfig EqualityVerifier
/**
 * Test to ensure that HiveConf does not try to load hivemetastore-site.xml,
 * when remote metastore is used.
 * @throws IOException
 * @throws Throwable
 */
@Test public void testHiveMetastoreRemoteConfig() throws IOException, Throwable {
// Point hive-site.xml at a remote metastore and clear any cached config state.
setHiveSiteWithRemoteMetastore();
resetDefaults();
HiveConf hiveConf=new HiveConf();
verifyMetastoreConfNotLoaded(hiveConf);
assertEquals("from.hive-site.xml",hiveConf.get("hive.dummyparam.test.server.specific.config.override"));
// Bringing up HiveServer2 must load the HS2 config but still skip the metastore one.
new HiveServer2();
hiveConf=new HiveConf();
verifyHS2ConfParams(hiveConf);
verifyMetastoreConfNotLoaded(hiveConf);
}
BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Ensure that system properties still get precedence. Config params set as
 * -hiveconf on commandline get set as system properties They should have the
 * final say
 */
@Test public void testSystemPropertyPrecedence(){
final String overrideKey="hive.conf.restricted.list";
try {
  HiveConf.setHiveSiteLocation(oldDefaultHiveSite);
  System.setProperty(overrideKey,"from.sysprop");
  HiveConf conf=new HiveConf();
  assertEquals("from.sysprop",conf.get(overrideKey));
  // Even after the HS2 config is loaded, the system property must win.
  new HiveServer2();
  conf=new HiveConf();
  assertTrue(HiveConf.isLoadHiveServer2Config());
  assertEquals("from.sysprop",conf.get(overrideKey));
}
finally {
  // Always scrub the system property so other tests are unaffected.
  System.getProperties().remove(overrideKey);
}
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Verify if appropriate server configuration (metastore, hiveserver2) get
 * loaded when the embedded clients are loaded
 * Checks values used in the configs used for testing.
 * @throws IOException
 * @throws Throwable
 */
@Test public void testServerConfigsEmbeddedMetastore() throws IOException, Throwable {
HiveConf.setHiveSiteLocation(oldDefaultHiveSite);
HiveConf hiveConf=new HiveConf();
// With an embedded metastore, hivemetastore-site.xml is loaded and overrides hive-site.xml.
assertTrue(HiveConf.isLoadMetastoreConfig());
assertEquals("from.hivemetastore-site.xml",hiveConf.get("hive.dummyparam.test.server.specific.config.override"));
assertEquals("from.hivemetastore-site.xml",hiveConf.get("hive.dummyparam.test.server.specific.config.metastoresite"));
assertEquals("from.hive-site.xml",hiveConf.get("hive.dummyparam.test.server.specific.config.hivesite"));
// HiveServer2 config must not be loaded until a HiveServer2 instance exists.
assertFalse(HiveConf.isLoadHiveServer2Config());
assertNull(hiveConf.get("hive.dummyparam.test.server.specific.config.hiveserver2site"));
new HiveServer2();
hiveConf=new HiveConf();
verifyHS2ConfParams(hiveConf);
assertEquals("from.hivemetastore-site.xml",hiveConf.get("hive.dummyparam.test.server.specific.config.metastoresite"));
}
Class: org.apache.hadoop.hive.metastore.hbase.TestHBaseAggrStatsCacheIntegration InternalCallVerifier EqualityVerifier
/**
 * Verifies aggregate-stats cache hit behaviour: the first query misses, a
 * repeat query (partitions in any order) is served from memory, and after the
 * in-memory cache is flushed the entry is re-fetched from HBase.
 */
@Test public void hit() throws Exception {
String dbName="default";
String tableName="hit";
List partVals1=Arrays.asList("today");
List partVals2=Arrays.asList("yesterday");
long now=System.currentTimeMillis();
List cols=new ArrayList<>();
cols.add(new FieldSchema("col1","boolean","nocomment"));
cols.add(new FieldSchema("col2","varchar","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,Collections.emptyMap());
List partCols=new ArrayList<>();
partCols.add(new FieldSchema("ds","string",""));
Table table=new Table(tableName,dbName,"me",(int)now,(int)now,0,sd,partCols,Collections.emptyMap(),null,null,null);
store.createTable(table);
// Create two partitions (ds=today, ds=yesterday), each with identical boolean
// and varchar column statistics.
for ( List partVals : Arrays.asList(partVals1,partVals2)) {
StorageDescriptor psd=new StorageDescriptor(sd);
psd.setLocation("file:/tmp/default/hit/ds=" + partVals.get(0));
Partition part=new Partition(partVals,dbName,tableName,(int)now,(int)now,psd,Collections.emptyMap());
store.addPartition(part);
ColumnStatistics cs=new ColumnStatistics();
ColumnStatisticsDesc desc=new ColumnStatisticsDesc(false,dbName,tableName);
desc.setLastAnalyzed(now);
desc.setPartName("ds=" + partVals.get(0));
cs.setStatsDesc(desc);
ColumnStatisticsObj obj=new ColumnStatisticsObj();
obj.setColName("col1");
obj.setColType("boolean");
ColumnStatisticsData data=new ColumnStatisticsData();
BooleanColumnStatsData bcsd=new BooleanColumnStatsData();
bcsd.setNumFalses(10);
bcsd.setNumTrues(20);
bcsd.setNumNulls(30);
data.setBooleanStats(bcsd);
obj.setStatsData(data);
cs.addToStatsObj(obj);
obj=new ColumnStatisticsObj();
obj.setColName("col2");
obj.setColType("varchar");
data=new ColumnStatisticsData();
StringColumnStatsData scsd=new StringColumnStatsData();
scsd.setAvgColLen(10.3);
scsd.setMaxColLen(2000);
scsd.setNumNulls(3);
scsd.setNumDVs(12342);
data.setStringStats(scsd);
obj.setStatsData(data);
cs.addToStatsObj(obj);
store.updatePartitionColumnStatistics(cs,partVals);
}
// Expected aggregates over both partitions: boolean counts sum (10+10 falses,
// 20+20 trues, 30+30 nulls); varchar numNulls sums (3+3) while avgColLen,
// maxColLen and numDVs stay at the per-partition values asserted below.
Checker statChecker=new Checker(){
@Override public void checkStats( AggrStats aggrStats) throws Exception {
Assert.assertEquals(2,aggrStats.getPartsFound());
Assert.assertEquals(2,aggrStats.getColStatsSize());
ColumnStatisticsObj cso=aggrStats.getColStats().get(0);
Assert.assertEquals("col1",cso.getColName());
Assert.assertEquals("boolean",cso.getColType());
BooleanColumnStatsData bcsd=cso.getStatsData().getBooleanStats();
Assert.assertEquals(20,bcsd.getNumFalses());
Assert.assertEquals(40,bcsd.getNumTrues());
Assert.assertEquals(60,bcsd.getNumNulls());
cso=aggrStats.getColStats().get(1);
Assert.assertEquals("col2",cso.getColName());
Assert.assertEquals("varchar",cso.getColType());
StringColumnStatsData scsd=cso.getStatsData().getStringStats();
Assert.assertEquals(10.3,scsd.getAvgColLen(),0.1);
Assert.assertEquals(2000,scsd.getMaxColLen());
Assert.assertEquals(6,scsd.getNumNulls());
Assert.assertEquals(12342,scsd.getNumDVs());
}
}
;
// First read: cache is cold -> 2 gets, 2 misses, no HBase hits.
AggrStats aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=today","ds=yesterday"),Arrays.asList("col1","col2"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(2,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(2,store.backdoor().getStatsCache().misses.getCnt());
// Same partitions in reverse order: served from memory, so misses and
// hbaseHits are unchanged while totalGets advances.
aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=yesterday","ds=today"),Arrays.asList("col1","col2"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(4,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(2,store.backdoor().getStatsCache().misses.getCnt());
// Drop the memory layer: the next read must be satisfied from HBase (2 hits).
store.backdoor().getStatsCache().flushMemory();
aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=today","ds=yesterday"),Arrays.asList("col1","col2"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(2,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(6,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(2,store.backdoor().getStatsCache().misses.getCnt());
}
EqualityVerifier
/**
 * Verifies that altering a partition invalidates only the cached aggregate
 * entries that include that partition: the entry containing the altered
 * "tomorrow" partition becomes a miss again, while the untouched
 * today/yesterday entry keeps serving from cache.
 */
@Test public void alterInvalidation() throws Exception {
try {
String dbName="default";
String tableName="ai";
List partVals1=Arrays.asList("today");
List partVals2=Arrays.asList("yesterday");
List partVals3=Arrays.asList("tomorrow");
long now=System.currentTimeMillis();
List cols=new ArrayList<>();
cols.add(new FieldSchema("col1","boolean","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,Collections.emptyMap());
List partCols=new ArrayList<>();
partCols.add(new FieldSchema("ds","string",""));
Table table=new Table(tableName,dbName,"me",(int)now,(int)now,0,sd,partCols,Collections.emptyMap(),null,null,null);
store.createTable(table);
// Keep references to the created partitions so one can be altered later.
Partition[] partitions=new Partition[3];
int partnum=0;
for ( List partVals : Arrays.asList(partVals1,partVals2,partVals3)) {
StorageDescriptor psd=new StorageDescriptor(sd);
psd.setLocation("file:/tmp/default/invalidation/ds=" + partVals.get(0));
Partition part=new Partition(partVals,dbName,tableName,(int)now,(int)now,psd,Collections.emptyMap());
partitions[partnum++]=part;
store.addPartition(part);
ColumnStatistics cs=new ColumnStatistics();
ColumnStatisticsDesc desc=new ColumnStatisticsDesc(false,dbName,tableName);
desc.setLastAnalyzed(now);
desc.setPartName("ds=" + partVals.get(0));
cs.setStatsDesc(desc);
ColumnStatisticsObj obj=new ColumnStatisticsObj();
obj.setColName("col1");
obj.setColType("boolean");
ColumnStatisticsData data=new ColumnStatisticsData();
BooleanColumnStatsData bcsd=new BooleanColumnStatsData();
bcsd.setNumFalses(10);
bcsd.setNumTrues(20);
bcsd.setNumNulls(30);
data.setBooleanStats(bcsd);
obj.setStatsData(data);
cs.addToStatsObj(obj);
store.updatePartitionColumnStatistics(cs,partVals);
}
// Populate the cache with two distinct entries: {today,tomorrow} and
// {today,yesterday}. Both are cold -> 2 gets, 2 misses.
AggrStats aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=today","ds=tomorrow"),Arrays.asList("col1"));
aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=today","ds=yesterday"),Arrays.asList("col1"));
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(2,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(2,store.backdoor().getStatsCache().misses.getCnt());
store.backdoor().getStatsCache().wakeInvalidator();
// Alter the "tomorrow" partition (touching only its last-access time).
// NOTE(review): (int)System.currentTimeMillis() truncates — apparently
// acceptable here since only a changed value is needed, not a real timestamp.
Partition newPart=new Partition(partitions[2]);
newPart.setLastAccessTime((int)System.currentTimeMillis());
store.alterPartition(dbName,tableName,partVals3,newPart);
// Run the invalidator promptly so the alter is observed before the next read.
store.backdoor().getStatsCache().setRunInvalidatorEvery(100);
store.backdoor().getStatsCache().wakeInvalidator();
// The entry containing the altered partition must have been invalidated -> miss.
aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=tomorrow","ds=today"),Arrays.asList("col1"));
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(3,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(3,store.backdoor().getStatsCache().misses.getCnt());
// The entry without the altered partition is still cached -> no new miss.
aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=yesterday","ds=today"),Arrays.asList("col1"));
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(4,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(3,store.backdoor().getStatsCache().misses.getCnt());
}
 finally {
// Restore default invalidator timing regardless of test outcome.
store.backdoor().getStatsCache().setRunInvalidatorEvery(5000);
store.backdoor().getStatsCache().setMaxTimeInCache(500000);
store.backdoor().getStatsCache().wakeInvalidator();
}
}
EqualityVerifier
/**
 * Same scenario as alterInvalidation but using the bulk alterPartitions call:
 * altering the "today" and "tomorrow" partitions invalidates BOTH cached
 * aggregate entries (each contains at least one altered partition), so both
 * follow-up reads are misses.
 */
@Test public void altersInvalidation() throws Exception {
try {
String dbName="default";
String tableName="asi";
List partVals1=Arrays.asList("today");
List partVals2=Arrays.asList("yesterday");
List partVals3=Arrays.asList("tomorrow");
long now=System.currentTimeMillis();
List cols=new ArrayList<>();
cols.add(new FieldSchema("col1","boolean","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,Collections.emptyMap());
List partCols=new ArrayList<>();
partCols.add(new FieldSchema("ds","string",""));
Table table=new Table(tableName,dbName,"me",(int)now,(int)now,0,sd,partCols,Collections.emptyMap(),null,null,null);
store.createTable(table);
// Keep references so two of the partitions can be bulk-altered below.
Partition[] partitions=new Partition[3];
int partnum=0;
for ( List partVals : Arrays.asList(partVals1,partVals2,partVals3)) {
StorageDescriptor psd=new StorageDescriptor(sd);
psd.setLocation("file:/tmp/default/invalidation/ds=" + partVals.get(0));
Partition part=new Partition(partVals,dbName,tableName,(int)now,(int)now,psd,Collections.emptyMap());
partitions[partnum++]=part;
store.addPartition(part);
ColumnStatistics cs=new ColumnStatistics();
ColumnStatisticsDesc desc=new ColumnStatisticsDesc(false,dbName,tableName);
desc.setLastAnalyzed(now);
desc.setPartName("ds=" + partVals.get(0));
cs.setStatsDesc(desc);
ColumnStatisticsObj obj=new ColumnStatisticsObj();
obj.setColName("col1");
obj.setColType("boolean");
ColumnStatisticsData data=new ColumnStatisticsData();
BooleanColumnStatsData bcsd=new BooleanColumnStatsData();
bcsd.setNumFalses(10);
bcsd.setNumTrues(20);
bcsd.setNumNulls(30);
data.setBooleanStats(bcsd);
obj.setStatsData(data);
cs.addToStatsObj(obj);
store.updatePartitionColumnStatistics(cs,partVals);
}
// Warm the cache with two entries: {today,tomorrow} and {today,yesterday}.
AggrStats aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=today","ds=tomorrow"),Arrays.asList("col1"));
aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=today","ds=yesterday"),Arrays.asList("col1"));
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(2,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(2,store.backdoor().getStatsCache().misses.getCnt());
store.backdoor().getStatsCache().wakeInvalidator();
// Bulk-alter "today" and "tomorrow" in a single alterPartitions call.
Partition[] newParts=new Partition[2];
newParts[0]=new Partition(partitions[0]);
newParts[0].setLastAccessTime((int)System.currentTimeMillis());
newParts[1]=new Partition(partitions[2]);
newParts[1].setLastAccessTime((int)System.currentTimeMillis());
store.alterPartitions(dbName,tableName,Arrays.asList(partVals1,partVals3),Arrays.asList(newParts));
store.backdoor().getStatsCache().setRunInvalidatorEvery(100);
store.backdoor().getStatsCache().wakeInvalidator();
// Both cached entries contained an altered partition, so both reads miss:
// misses climb to 3 and then 4.
aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=tomorrow","ds=today"),Arrays.asList("col1"));
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(3,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(3,store.backdoor().getStatsCache().misses.getCnt());
aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=yesterday","ds=today"),Arrays.asList("col1"));
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(4,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(4,store.backdoor().getStatsCache().misses.getCnt());
}
 finally {
// Restore default invalidator timing regardless of test outcome.
store.backdoor().getStatsCache().setRunInvalidatorEvery(5000);
store.backdoor().getStatsCache().setMaxTimeInCache(500000);
store.backdoor().getStatsCache().wakeInvalidator();
}
}
InternalCallVerifier EqualityVerifier
/**
 * Verifies aggregate-stats behaviour when only one of two partitions has
 * statistics: the aggregate reports partsFound=1 with that partition's values,
 * repeat queries hit the memory cache, and a flush forces an HBase read.
 */
@Test public void someWithStats() throws Exception {
String dbName="default";
String tableName="psws";
List partVals1=Arrays.asList("today");
List partVals2=Arrays.asList("yesterday");
long now=System.currentTimeMillis();
List cols=new ArrayList<>();
cols.add(new FieldSchema("col1","long","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,Collections.emptyMap());
List partCols=new ArrayList<>();
partCols.add(new FieldSchema("ds","string",""));
Table table=new Table(tableName,dbName,"me",(int)now,(int)now,0,sd,partCols,Collections.emptyMap(),null,null,null);
store.createTable(table);
// Only the first partition (ds=today) gets column statistics; the second is
// created without any, which is the condition under test.
boolean first=true;
for ( List partVals : Arrays.asList(partVals1,partVals2)) {
StorageDescriptor psd=new StorageDescriptor(sd);
psd.setLocation("file:/tmp/default/psws/ds=" + partVals.get(0));
Partition part=new Partition(partVals,dbName,tableName,(int)now,(int)now,psd,Collections.emptyMap());
store.addPartition(part);
if (first) {
ColumnStatistics cs=new ColumnStatistics();
ColumnStatisticsDesc desc=new ColumnStatisticsDesc(false,dbName,tableName);
desc.setLastAnalyzed(now);
desc.setPartName("ds=" + partVals.get(0));
cs.setStatsDesc(desc);
ColumnStatisticsObj obj=new ColumnStatisticsObj();
obj.setColName("col1");
obj.setColType("long");
ColumnStatisticsData data=new ColumnStatisticsData();
LongColumnStatsData lcsd=new LongColumnStatsData();
lcsd.setHighValue(192L);
lcsd.setLowValue(-20L);
lcsd.setNumNulls(30);
lcsd.setNumDVs(32);
data.setLongStats(lcsd);
obj.setStatsData(data);
cs.addToStatsObj(obj);
store.updatePartitionColumnStatistics(cs,partVals);
first=false;
}
}
// Only one partition has stats, so the aggregate reports exactly that
// partition's values with partsFound=1.
Checker statChecker=new Checker(){
@Override public void checkStats( AggrStats aggrStats) throws Exception {
Assert.assertEquals(1,aggrStats.getPartsFound());
Assert.assertEquals(1,aggrStats.getColStatsSize());
ColumnStatisticsObj cso=aggrStats.getColStats().get(0);
Assert.assertEquals("col1",cso.getColName());
Assert.assertEquals("long",cso.getColType());
LongColumnStatsData lcsd=cso.getStatsData().getLongStats();
Assert.assertEquals(192L,lcsd.getHighValue());
Assert.assertEquals(-20L,lcsd.getLowValue());
Assert.assertEquals(30,lcsd.getNumNulls());
Assert.assertEquals(32,lcsd.getNumDVs());
}
}
;
// First read: cold cache -> 1 get, 1 miss, no HBase hits.
AggrStats aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=today","ds=yesterday"),Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(1,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(1,store.backdoor().getStatsCache().misses.getCnt());
// Same partitions, reversed order: served from memory, counters otherwise unchanged.
aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=yesterday","ds=today"),Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(2,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(1,store.backdoor().getStatsCache().misses.getCnt());
// Flush memory: the next read must come from HBase (1 hbaseHit).
store.backdoor().getStatsCache().flushMemory();
aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=today","ds=yesterday"),Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(1,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(3,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(1,store.backdoor().getStatsCache().misses.getCnt());
}
InternalCallVerifier EqualityVerifier
// End-to-end exercise of the aggregate-stats cache invalidator: cached
// aggregates must be evicted when the invalidator runs after a stats update
// or a partition drop, which shows up as extra misses on subsequent gets.
// NOTE(review): the counter expectations assume a fresh cache at entry —
// confirm test isolation if this starts flaking.
@Test public void invalidation() throws Exception {
try {
String dbName="default";
String tableName="invalidation";
List partVals1=Arrays.asList("today");
List partVals2=Arrays.asList("yesterday");
List partVals3=Arrays.asList("tomorrow");
long now=System.currentTimeMillis();
List cols=new ArrayList<>();
cols.add(new FieldSchema("col1","boolean","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,Collections.emptyMap());
List partCols=new ArrayList<>();
partCols.add(new FieldSchema("ds","string",""));
Table table=new Table(tableName,dbName,"me",(int)now,(int)now,0,sd,partCols,Collections.emptyMap(),null,null,null);
store.createTable(table);
// Create three partitions, each with identical boolean column stats
// (10 falses / 20 trues / 30 nulls) so aggregates are easy to predict.
for ( List partVals : Arrays.asList(partVals1,partVals2,partVals3)) {
StorageDescriptor psd=new StorageDescriptor(sd);
psd.setLocation("file:/tmp/default/invalidation/ds=" + partVals.get(0));
Partition part=new Partition(partVals,dbName,tableName,(int)now,(int)now,psd,Collections.emptyMap());
store.addPartition(part);
ColumnStatistics cs=new ColumnStatistics();
ColumnStatisticsDesc desc=new ColumnStatisticsDesc(false,dbName,tableName);
desc.setLastAnalyzed(now);
desc.setPartName("ds=" + partVals.get(0));
cs.setStatsDesc(desc);
ColumnStatisticsObj obj=new ColumnStatisticsObj();
obj.setColName("col1");
obj.setColType("boolean");
ColumnStatisticsData data=new ColumnStatisticsData();
BooleanColumnStatsData bcsd=new BooleanColumnStatsData();
bcsd.setNumFalses(10);
bcsd.setNumTrues(20);
bcsd.setNumNulls(30);
data.setBooleanStats(bcsd);
obj.setStatsData(data);
cs.addToStatsObj(obj);
store.updatePartitionColumnStatistics(cs,partVals);
}
// Expected aggregate over any two unmodified partitions: each count doubles.
Checker statChecker=new Checker(){
@Override public void checkStats( AggrStats aggrStats) throws Exception {
Assert.assertEquals(2,aggrStats.getPartsFound());
Assert.assertEquals(1,aggrStats.getColStatsSize());
ColumnStatisticsObj cso=aggrStats.getColStats().get(0);
Assert.assertEquals("col1",cso.getColName());
Assert.assertEquals("boolean",cso.getColType());
BooleanColumnStatsData bcsd=cso.getStatsData().getBooleanStats();
Assert.assertEquals(20,bcsd.getNumFalses());
Assert.assertEquals(40,bcsd.getNumTrues());
Assert.assertEquals(60,bcsd.getNumNulls());
}
}
;
// First lookup: one total get, one miss (cache is cold for this key).
AggrStats aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=today","ds=yesterday"),Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(1,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(1,store.backdoor().getStatsCache().misses.getCnt());
// Same partitions in reverse order: served from memory (no new miss).
aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=yesterday","ds=today"),Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(2,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(1,store.backdoor().getStatsCache().misses.getCnt());
// A new partition combination is a fresh miss.
aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=tomorrow","ds=today"),Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(3,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(2,store.backdoor().getStatsCache().misses.getCnt());
aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=tomorrow","ds=today"),Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(4,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(2,store.backdoor().getStatsCache().misses.getCnt());
// Nothing has changed yet, so waking the invalidator must evict nothing:
// the next get still has no new miss.
store.backdoor().getStatsCache().wakeInvalidator();
aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=tomorrow","ds=today"),Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(5,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(2,store.backdoor().getStatsCache().misses.getCnt());
// Rewrite the stats for "tomorrow" with 10x values (100/200/300).
ColumnStatistics cs=new ColumnStatistics();
ColumnStatisticsDesc desc=new ColumnStatisticsDesc(false,dbName,tableName);
desc.setLastAnalyzed(now);
desc.setPartName("ds=" + partVals3.get(0));
cs.setStatsDesc(desc);
ColumnStatisticsObj obj=new ColumnStatisticsObj();
obj.setColName("col1");
obj.setColType("boolean");
ColumnStatisticsData data=new ColumnStatisticsData();
BooleanColumnStatsData bcsd=new BooleanColumnStatsData();
bcsd.setNumFalses(100);
bcsd.setNumTrues(200);
bcsd.setNumNulls(300);
data.setBooleanStats(bcsd);
obj.setStatsData(data);
cs.addToStatsObj(obj);
// Expected aggregate for ("tomorrow","today") after the update:
// 100+10 falses, 200+20 trues, 300+30 nulls.
Checker afterUpdate=new Checker(){
@Override public void checkStats( AggrStats aggrStats) throws Exception {
Assert.assertEquals(2,aggrStats.getPartsFound());
Assert.assertEquals(1,aggrStats.getColStatsSize());
ColumnStatisticsObj cso=aggrStats.getColStats().get(0);
Assert.assertEquals("col1",cso.getColName());
Assert.assertEquals("boolean",cso.getColType());
BooleanColumnStatsData bcsd=cso.getStatsData().getBooleanStats();
Assert.assertEquals(110,bcsd.getNumFalses());
Assert.assertEquals(220,bcsd.getNumTrues());
Assert.assertEquals(330,bcsd.getNumNulls());
}
}
;
store.updatePartitionColumnStatistics(cs,partVals3);
store.backdoor().getStatsCache().setRunInvalidatorEvery(100);
store.backdoor().getStatsCache().wakeInvalidator();
// The update invalidated the cached ("tomorrow","today") entry: a new miss.
aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=tomorrow","ds=today"),Arrays.asList("col1"));
afterUpdate.checkStats(aggrStats);
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(6,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(3,store.backdoor().getStatsCache().misses.getCnt());
// The ("yesterday","today") entry was untouched and is still cached.
aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=yesterday","ds=today"),Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(7,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(3,store.backdoor().getStatsCache().misses.getCnt());
// Dropping a partition must also invalidate entries that included it.
store.dropPartition(dbName,tableName,partVals2);
store.backdoor().getStatsCache().wakeInvalidator();
aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=yesterday","ds=today"),Arrays.asList("col1"));
// Only "today" remains for this request: one partition's worth of stats.
new Checker(){
@Override public void checkStats( AggrStats aggrStats) throws Exception {
Assert.assertEquals(1,aggrStats.getPartsFound());
Assert.assertEquals(1,aggrStats.getColStatsSize());
ColumnStatisticsObj cso=aggrStats.getColStats().get(0);
Assert.assertEquals("col1",cso.getColName());
Assert.assertEquals("boolean",cso.getColType());
BooleanColumnStatsData bcsd=cso.getStatsData().getBooleanStats();
Assert.assertEquals(10,bcsd.getNumFalses());
Assert.assertEquals(20,bcsd.getNumTrues());
Assert.assertEquals(30,bcsd.getNumNulls());
}
}
.checkStats(aggrStats);
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(8,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(4,store.backdoor().getStatsCache().misses.getCnt());
aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=tomorrow","ds=today"),Arrays.asList("col1"));
afterUpdate.checkStats(aggrStats);
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(9,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(4,store.backdoor().getStatsCache().misses.getCnt());
}
finally {
// Restore invalidator timing so later tests see the default configuration.
store.backdoor().getStatsCache().setRunInvalidatorEvery(5000);
store.backdoor().getStatsCache().setMaxTimeInCache(500000);
store.backdoor().getStatsCache().wakeInvalidator();
}
}
Class: org.apache.hadoop.hive.metastore.hbase.TestHBaseAggregateStatsCache EqualityVerifier
/**
 * Partitions exist but no column statistics were ever written for them, so
 * the aggregate-stats lookup must report zero partitions found.
 * (Raw {@code List} locals replaced with parameterized types.)
 */
@Test public void noneWithStats() throws Exception {
String dbName="default";
String tableName="nws";
List<String> partVals1=Arrays.asList("today");
List<String> partVals2=Arrays.asList("yesterday");
long now=System.currentTimeMillis();
List<FieldSchema> cols=new ArrayList<>();
cols.add(new FieldSchema("col1","boolean","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,Collections.emptyMap());
List<FieldSchema> partCols=new ArrayList<>();
partCols.add(new FieldSchema("ds","string",""));
Table table=new Table(tableName,dbName,"me",(int)now,(int)now,0,sd,partCols,Collections.emptyMap(),null,null,null);
store.createTable(table);
// Add the partitions WITHOUT calling updatePartitionColumnStatistics.
for (List<String> partVals : Arrays.asList(partVals1,partVals2)) {
StorageDescriptor psd=new StorageDescriptor(sd);
psd.setLocation("file:/tmp/default/nws/ds=" + partVals.get(0));
Partition part=new Partition(partVals,dbName,tableName,(int)now,(int)now,psd,Collections.emptyMap());
store.addPartition(part);
}
Checker statChecker=new Checker(){
@Override public void checkStats( AggrStats aggrStats) throws Exception {
// No stats were written, so nothing can be aggregated.
Assert.assertEquals(0,aggrStats.getPartsFound());
}
}
;
AggrStats aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=today","ds=yesterday"),Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
}
InternalCallVerifier EqualityVerifier
/**
 * Both partitions have boolean stats (10/20/30 each); the aggregate must sum
 * them (20/40/60), and the second lookup — same partitions, reversed order —
 * must be served from the memory cache (no additional miss).
 * (Raw {@code List} locals replaced with parameterized types.)
 */
@Test public void allWithStats() throws Exception {
String dbName="default";
String tableName="hit";
List<String> partVals1=Arrays.asList("today");
List<String> partVals2=Arrays.asList("yesterday");
long now=System.currentTimeMillis();
List<FieldSchema> cols=new ArrayList<>();
cols.add(new FieldSchema("col1","boolean","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,Collections.emptyMap());
List<FieldSchema> partCols=new ArrayList<>();
partCols.add(new FieldSchema("ds","string",""));
Table table=new Table(tableName,dbName,"me",(int)now,(int)now,0,sd,partCols,Collections.emptyMap(),null,null,null);
store.createTable(table);
// Each partition gets identical stats so the aggregate is predictable.
for (List<String> partVals : Arrays.asList(partVals1,partVals2)) {
StorageDescriptor psd=new StorageDescriptor(sd);
psd.setLocation("file:/tmp/default/hit/ds=" + partVals.get(0));
Partition part=new Partition(partVals,dbName,tableName,(int)now,(int)now,psd,Collections.emptyMap());
store.addPartition(part);
ColumnStatistics cs=new ColumnStatistics();
ColumnStatisticsDesc desc=new ColumnStatisticsDesc(false,dbName,tableName);
desc.setLastAnalyzed(now);
desc.setPartName("ds=" + partVals.get(0));
cs.setStatsDesc(desc);
ColumnStatisticsObj obj=new ColumnStatisticsObj();
obj.setColName("col1");
obj.setColType("boolean");
ColumnStatisticsData data=new ColumnStatisticsData();
BooleanColumnStatsData bcsd=new BooleanColumnStatsData();
bcsd.setNumFalses(10);
bcsd.setNumTrues(20);
bcsd.setNumNulls(30);
data.setBooleanStats(bcsd);
obj.setStatsData(data);
cs.addToStatsObj(obj);
store.updatePartitionColumnStatistics(cs,partVals);
}
Checker statChecker=new Checker(){
@Override public void checkStats( AggrStats aggrStats) throws Exception {
Assert.assertEquals(2,aggrStats.getPartsFound());
Assert.assertEquals(1,aggrStats.getColStatsSize());
ColumnStatisticsObj cso=aggrStats.getColStats().get(0);
Assert.assertEquals("col1",cso.getColName());
Assert.assertEquals("boolean",cso.getColType());
BooleanColumnStatsData bcsd=cso.getStatsData().getBooleanStats();
// Sums of the two partitions' 10/20/30.
Assert.assertEquals(20,bcsd.getNumFalses());
Assert.assertEquals(40,bcsd.getNumTrues());
Assert.assertEquals(60,bcsd.getNumNulls());
}
}
;
// Cold lookup: one get, one miss.
AggrStats aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=today","ds=yesterday"),Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(1,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(1,store.backdoor().getStatsCache().misses.getCnt());
// Same partitions, reversed order: another get but no new miss.
aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=yesterday","ds=today"),Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(2,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(1,store.backdoor().getStatsCache().misses.getCnt());
}
InternalCallVerifier EqualityVerifier
/**
 * Requests stats for partition names that were never created; the lookup must
 * find zero partitions while still counting one get and one miss.
 * (Removed the unused {@code partVals1}/{@code partVals2} locals and replaced
 * raw {@code List} types with parameterized ones.)
 */
@Test public void nonexistentPartitions() throws Exception {
String dbName="default";
String tableName="nep";
long now=System.currentTimeMillis();
List<FieldSchema> cols=new ArrayList<>();
cols.add(new FieldSchema("col1","boolean","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,Collections.emptyMap());
List<FieldSchema> partCols=new ArrayList<>();
partCols.add(new FieldSchema("ds","string",""));
Table table=new Table(tableName,dbName,"me",(int)now,(int)now,0,sd,partCols,Collections.emptyMap(),null,null,null);
// Only the table is created — no partitions are ever added.
store.createTable(table);
Checker statChecker=new Checker(){
@Override public void checkStats( AggrStats aggrStats) throws Exception {
Assert.assertEquals(0,aggrStats.getPartsFound());
}
}
;
AggrStats aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=today","ds=yesterday"),Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
// Even an empty result counts as a get and a miss.
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(1,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(1,store.backdoor().getStatsCache().misses.getCnt());
}
InternalCallVerifier EqualityVerifier
/**
 * Only one of the two requested partitions ("ds=today") exists and has stats;
 * the aggregate must report exactly that one partition's double stats, and the
 * reversed-order repeat must be a cache hit (no new miss).
 * (Removed the unused {@code partVals2} local and replaced raw {@code List}
 * types with parameterized ones.)
 */
@Test public void someNonexistentPartitions() throws Exception {
String dbName="default";
String tableName="snp";
List<String> partVals1=Arrays.asList("today");
long now=System.currentTimeMillis();
List<FieldSchema> cols=new ArrayList<>();
cols.add(new FieldSchema("col1","boolean","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,Collections.emptyMap());
List<FieldSchema> partCols=new ArrayList<>();
partCols.add(new FieldSchema("ds","string",""));
Table table=new Table(tableName,dbName,"me",(int)now,(int)now,0,sd,partCols,Collections.emptyMap(),null,null,null);
store.createTable(table);
// Deliberately create and populate only the "today" partition; the
// "ds=yesterday" name requested below never exists.
StorageDescriptor psd=new StorageDescriptor(sd);
psd.setLocation("file:/tmp/default/hit/ds=" + partVals1.get(0));
Partition part=new Partition(partVals1,dbName,tableName,(int)now,(int)now,psd,Collections.emptyMap());
store.addPartition(part);
ColumnStatistics cs=new ColumnStatistics();
ColumnStatisticsDesc desc=new ColumnStatisticsDesc(false,dbName,tableName);
desc.setLastAnalyzed(now);
desc.setPartName("ds=" + partVals1.get(0));
cs.setStatsDesc(desc);
ColumnStatisticsObj obj=new ColumnStatisticsObj();
obj.setColName("col1");
obj.setColType("double");
ColumnStatisticsData data=new ColumnStatisticsData();
DoubleColumnStatsData dcsd=new DoubleColumnStatsData();
dcsd.setHighValue(1000.2342343);
dcsd.setLowValue(-20.1234213423);
dcsd.setNumNulls(30);
dcsd.setNumDVs(12342);
data.setDoubleStats(dcsd);
obj.setStatsData(data);
cs.addToStatsObj(obj);
store.updatePartitionColumnStatistics(cs,partVals1);
Checker statChecker=new Checker(){
@Override public void checkStats( AggrStats aggrStats) throws Exception {
// Only the existing partition contributes.
Assert.assertEquals(1,aggrStats.getPartsFound());
Assert.assertEquals(1,aggrStats.getColStatsSize());
ColumnStatisticsObj cso=aggrStats.getColStats().get(0);
Assert.assertEquals("col1",cso.getColName());
Assert.assertEquals("double",cso.getColType());
DoubleColumnStatsData dcsd=cso.getStatsData().getDoubleStats();
Assert.assertEquals(1000.23,dcsd.getHighValue(),0.01);
Assert.assertEquals(-20.12,dcsd.getLowValue(),0.01);
Assert.assertEquals(30,dcsd.getNumNulls());
Assert.assertEquals(12342,dcsd.getNumDVs());
}
}
;
// Cold lookup: one get, one miss.
AggrStats aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=today","ds=yesterday"),Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(1,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(1,store.backdoor().getStatsCache().misses.getCnt());
// Reversed-order repeat: another get but no new miss.
aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=yesterday","ds=today"),Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(2,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(1,store.backdoor().getStatsCache().misses.getCnt());
}
Class: org.apache.hadoop.hive.metastore.hbase.TestHBaseAggregateStatsCacheWithBitVector InternalCallVerifier EqualityVerifier
// Aggregation of double column stats that carry NDV bit vectors: the two
// partitions' vectors must be merged (per-register union) and the estimated
// numDVs recomputed (5) rather than summed.
@Test public void allPartitions() throws Exception {
String dbName="default";
String tableName="snp";
List partVals1=Arrays.asList("today");
List partVals2=Arrays.asList("yesterday");
long now=System.currentTimeMillis();
List cols=new ArrayList<>();
cols.add(new FieldSchema("col1","boolean","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,Collections.emptyMap());
List partCols=new ArrayList<>();
partCols.add(new FieldSchema("ds","string",""));
Table table=new Table(tableName,dbName,"me",(int)now,(int)now,0,sd,partCols,Collections.emptyMap(),null,null,null);
store.createTable(table);
StorageDescriptor psd=new StorageDescriptor(sd);
psd.setLocation("file:/tmp/default/hit/ds=" + partVals1.get(0));
Partition part=new Partition(partVals1,dbName,tableName,(int)now,(int)now,psd,Collections.emptyMap());
store.addPartition(part);
psd=new StorageDescriptor(sd);
psd.setLocation("file:/tmp/default/hit/ds=" + partVals2.get(0));
part=new Partition(partVals2,dbName,tableName,(int)now,(int)now,psd,Collections.emptyMap());
store.addPartition(part);
// Stats for "today": identical scalar stats to "yesterday" but a distinct
// bit vector, so only the merged vector distinguishes the aggregate.
ColumnStatistics cs=new ColumnStatistics();
ColumnStatisticsDesc desc=new ColumnStatisticsDesc(false,dbName,tableName);
desc.setLastAnalyzed(now);
desc.setPartName("ds=" + partVals1.get(0));
cs.setStatsDesc(desc);
ColumnStatisticsObj obj=new ColumnStatisticsObj();
obj.setColName("col1");
obj.setColType("double");
ColumnStatisticsData data=new ColumnStatisticsData();
DoubleColumnStatsData dcsd=new DoubleColumnStatsData();
dcsd.setHighValue(1000.2342343);
dcsd.setLowValue(-20.1234213423);
dcsd.setNumNulls(30);
dcsd.setNumDVs(12342);
dcsd.setBitVectors("{0, 4, 5, 7}{0, 1}{0, 1, 2}{0, 1, 4}{0}{0, 2}{0, 3}{0, 2, 3, 4}{0, 1, 4}{0, 1}{0}{0, 1, 3, 8}{0, 2}{0, 2}{0, 9}{0, 1, 4}");
data.setDoubleStats(dcsd);
obj.setStatsData(data);
cs.addToStatsObj(obj);
store.updatePartitionColumnStatistics(cs,partVals1);
// Stats for "yesterday": same scalars, different bit vector.
cs=new ColumnStatistics();
desc=new ColumnStatisticsDesc(false,dbName,tableName);
desc.setLastAnalyzed(now);
desc.setPartName("ds=" + partVals2.get(0));
cs.setStatsDesc(desc);
obj=new ColumnStatisticsObj();
obj.setColName("col1");
obj.setColType("double");
data=new ColumnStatisticsData();
dcsd=new DoubleColumnStatsData();
dcsd.setHighValue(1000.2342343);
dcsd.setLowValue(-20.1234213423);
dcsd.setNumNulls(30);
dcsd.setNumDVs(12342);
dcsd.setBitVectors("{0, 1}{0, 1}{1, 2, 4}{0, 1, 2}{0, 1, 2}{0, 2}{0, 1, 3, 4}{0, 1}{0, 1}{3, 4, 6}{2}{0, 1}{0, 3}{0}{0, 1}{0, 1, 4}");
data.setDoubleStats(dcsd);
obj.setStatsData(data);
cs.addToStatsObj(obj);
store.updatePartitionColumnStatistics(cs,partVals2);
Checker statChecker=new Checker(){
@Override public void checkStats( AggrStats aggrStats) throws Exception {
Assert.assertEquals(2,aggrStats.getPartsFound());
Assert.assertEquals(1,aggrStats.getColStatsSize());
ColumnStatisticsObj cso=aggrStats.getColStats().get(0);
Assert.assertEquals("col1",cso.getColName());
Assert.assertEquals("double",cso.getColType());
DoubleColumnStatsData dcsd=cso.getStatsData().getDoubleStats();
Assert.assertEquals(1000.23,dcsd.getHighValue(),0.01);
Assert.assertEquals(-20.12,dcsd.getLowValue(),0.01);
// numNulls sums (30+30); numDVs is re-estimated from the merged
// vectors rather than summed.
Assert.assertEquals(60,dcsd.getNumNulls());
Assert.assertEquals(5,dcsd.getNumDVs());
// Each {...} register is the union of the two inputs' registers.
Assert.assertEquals("{0, 1, 4, 5, 7}{0, 1}{0, 1, 2, 4}{0, 1, 2, 4}{0, 1, 2}{0, 2}{0, 1, 3, 4}{0, 1, 2, 3, 4}{0, 1, 4}{0, 1, 3, 4, 6}{0, 2}{0, 1, 3, 8}{0, 2, 3}{0, 2}{0, 1, 9}{0, 1, 4}",dcsd.getBitVectors());
}
}
;
// Cold lookup (one miss), then a reversed-order repeat served from cache.
AggrStats aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=today","ds=yesterday"),Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(1,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(1,store.backdoor().getStatsCache().misses.getCnt());
aggrStats=store.get_aggr_stats_for(dbName,tableName,Arrays.asList("ds=yesterday","ds=today"),Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(0,store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(2,store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(1,store.backdoor().getStatsCache().misses.getCnt());
}
Class: org.apache.hadoop.hive.metastore.hbase.TestHBaseFilterPlanUtil EqualityVerifier
/**
* Test function that finds greater/lesser marker
*/
@Test public void testgetComparedMarker(){
// Equal markers (both inclusive): first argument is reported greater.
ScanMarker left=new ScanMarker("1",INCLUSIVE,"int");
ScanMarker right=new ScanMarker("1",INCLUSIVE,"int");
assertFirstGreater(left,right);
// Equal markers (both exclusive) behave the same way.
left=new ScanMarker("1",!INCLUSIVE,"int");
right=new ScanMarker("1",!INCLUSIVE,"int");
assertFirstGreater(left,right);
// Two nulls are also handled.
assertFirstGreater(null,null);
// With one side null, getComparedMarker returns the non-null marker in
// either argument position and for either direction flag.
left=new ScanMarker("1",!INCLUSIVE,"int");
Assert.assertEquals(left,ScanPlan.getComparedMarker(left,null,true));
Assert.assertEquals(left,ScanPlan.getComparedMarker(null,left,true));
Assert.assertEquals(left,ScanPlan.getComparedMarker(left,null,false));
Assert.assertEquals(left,ScanPlan.getComparedMarker(null,left,false));
// Strictly greater value: first argument wins.
left=new ScanMarker("2",INCLUSIVE,"int");
right=new ScanMarker("1",INCLUSIVE,"int");
assertFirstGreater(left,right);
}
InternalCallVerifier EqualityVerifier
/**
* Test MultiScanPlan AND
*/
@Test public void testMultiScanPlanAnd(){
// ANDing multi-plans yields one plan per pairing: 1x1 = 1.
MultiScanPlan left=createMultiScanPlan(new ScanPlan());
MultiScanPlan right=createMultiScanPlan(new ScanPlan());
Assert.assertEquals(1,left.and(right).getPlans().size());
// 1x2 = 2, in either operand order.
right=createMultiScanPlan(new ScanPlan(),new ScanPlan());
Assert.assertEquals(2,left.and(right).getPlans().size());
Assert.assertEquals(2,right.and(left).getPlans().size());
// 2x3 = 6, again independent of operand order.
left=createMultiScanPlan(new ScanPlan(),new ScanPlan());
right=createMultiScanPlan(new ScanPlan(),new ScanPlan(),new ScanPlan());
Assert.assertEquals(6,left.and(right).getPlans().size());
Assert.assertEquals(6,right.and(left).getPlans().size());
}
InternalCallVerifier EqualityVerifier
/**
* Test MultiScanPlan OR
*/
@Test public void testMultiScanPlanOr(){
// ORing plans concatenates their plan lists: 1 + 1 = 2.
MultiScanPlan left=createMultiScanPlan(new ScanPlan());
MultiScanPlan right=createMultiScanPlan(new ScanPlan());
Assert.assertEquals(2,left.or(right).getPlans().size());
// A bare ScanPlan can appear on either side of the OR.
Assert.assertEquals(2,left.or(new ScanPlan()).getPlans().size());
Assert.assertEquals(2,(new ScanPlan()).or(left).getPlans().size());
// 1 + 2 = 3, in either operand order.
right=createMultiScanPlan(new ScanPlan(),new ScanPlan());
Assert.assertEquals(3,left.or(right).getPlans().size());
Assert.assertEquals(3,right.or(left).getPlans().size());
}
EqualityVerifier
/**
* Test the function that compares byte arrays
*/
@Test public void testCompare(){
// Element-wise byte comparison; a shared-prefix shorter array sorts first.
byte[] oneTwo=new byte[]{1,2};
byte[] oneThree=new byte[]{1,3};
byte[] oneTwoThree=new byte[]{1,2,3};
byte[] threeTwo=new byte[]{3,2};
Assert.assertEquals(-1,HBaseFilterPlanUtil.compare(oneTwo,oneThree));
Assert.assertEquals(-1,HBaseFilterPlanUtil.compare(oneTwoThree,oneThree));
Assert.assertEquals(-1,HBaseFilterPlanUtil.compare(oneTwo,oneTwoThree));
// Identical content compares equal.
Assert.assertEquals(0,HBaseFilterPlanUtil.compare(new byte[]{3,2},threeTwo));
// Longer array with an equal prefix, or a greater byte, compares greater.
Assert.assertEquals(1,HBaseFilterPlanUtil.compare(new byte[]{3,2,1},threeTwo));
Assert.assertEquals(1,HBaseFilterPlanUtil.compare(new byte[]{3,3,1},threeTwo));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
/**
 * Mixed int/string partition keys: the int "year" bound is encoded directly
 * into the scan row-key range, while the trailing string key "state" falls
 * back to the row-filter comparator.
 * Fixes: JUnit {@code assertEquals} arguments were reversed (expected value
 * must come first, or failure messages report the values backwards); raw
 * {@code List} replaced with a parameterized type.
 */
@Test public void testPartitionKeyScannerMixedType() throws Exception {
List<FieldSchema> parts=new ArrayList<>();
parts.add(new FieldSchema("year","int",null));
parts.add(new FieldSchema("month","int",null));
parts.add(new FieldSchema("state","string",null));
ExpressionTree exprTree=PartFilterExprUtil.getFilterParser("year = 2015 and state = 'CA'").tree;
PlanResult planRes=HBaseFilterPlanUtil.getFilterPlan(exprTree,parts);
Assert.assertEquals(1,planRes.plan.getPlans().size());
ScanPlan sp=planRes.plan.getPlans().get(0);
byte[] startRowSuffix=sp.getStartRowSuffix("testdb","testtb",parts);
byte[] endRowSuffix=sp.getEndRowSuffix("testdb","testtb",parts);
RowFilter filter=(RowFilter)sp.getFilter(parts);
// year = 2015 becomes the row-key range [2015, 2016).
Assert.assertTrue(Bytes.contains(startRowSuffix,Shorts.toByteArray((short)2015)));
Assert.assertTrue(Bytes.contains(endRowSuffix,Shorts.toByteArray((short)2016)));
// The state predicate is left to the comparator as a single range on "state".
PartitionKeyComparator comparator=(PartitionKeyComparator)filter.getComparator();
Assert.assertEquals(1,comparator.ranges.size());
Assert.assertEquals("state",comparator.ranges.get(0).keyName);
}
InternalCallVerifier EqualityVerifier
/**
* Test ScanPlan OR operation
*/
/**
 * Test ScanPlan OR operation: OR keeps both operands' plans, and each
 * resulting plan retains its own start marker.
 * Fix: the original called {@code .equals(...)} on the markers and discarded
 * the boolean result, so those two checks were silent no-ops; they are now
 * real assertions.
 */
@Test public void testScanPlanOr(){
ScanPlan l=new ScanPlan();
ScanPlan r=new ScanPlan();
l.setStartMarker("a","int","1",INCLUSIVE);
r.setStartMarker("a","int","11",INCLUSIVE);
FilterPlan res1=l.or(r);
Assert.assertEquals(2,res1.getPlans().size());
// Each side of the OR keeps its original start marker for key "a".
Assert.assertEquals(l.markers.get("a").startMarker,res1.getPlans().get(0).markers.get("a").startMarker);
Assert.assertEquals(r.markers.get("a").startMarker,res1.getPlans().get(1).markers.get("a").startMarker);
// ORing again appends one more plan: 2 + 1 = 3.
FilterPlan res2=res1.or(r);
Assert.assertEquals(3,res2.getPlans().size());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
* Test plan generation from TreeNode
* @throws MetaException
*/
/**
 * Plan generation from TreeNode combinations of range and LIKE leaves.
 * Cleanups: raw {@code List} replaced with a parameterized type;
 * {@code assertEquals(false, ...)} replaced with {@code assertFalse}.
 * @throws MetaException on filter-plan generation failure
 */
@Test public void testTreeNodePlan() throws MetaException {
final String KEY="k1";
final String VAL1="10";
final String VAL2="11";
LeafNode l=new LeafNode();
l.keyName=KEY;
l.value=VAL1;
final ScanMarker DEFAULT_SCANMARKER=null;
List<FieldSchema> parts=new ArrayList<>();
parts.add(new FieldSchema("k1","int",null));
LeafNode r=new LeafNode();
r.keyName=KEY;
r.value=VAL2;
// k1 >= 10 AND k1 < 11 -> single range [10 inclusive, 11 exclusive).
TreeNode tn=new TreeNode(l,LogicalOperator.AND,r);
l.operator=Operator.GREATERTHANOREQUALTO;
r.operator=Operator.LESSTHAN;
verifyPlan(tn,parts,KEY,new ScanMarker(VAL1,INCLUSIVE,"int"),new ScanMarker(VAL2,!INCLUSIVE,"int"));
// k1 >= 10 AND k1 > 11 -> start at 11 exclusive, no end marker.
l.operator=Operator.GREATERTHANOREQUALTO;
r.operator=Operator.GREATERTHAN;
verifyPlan(tn,parts,KEY,new ScanMarker(VAL2,!INCLUSIVE,"int"),DEFAULT_SCANMARKER);
// OR of the two leaves -> two scan plans, fully supported.
tn=new TreeNode(l,LogicalOperator.OR,r);
ExpressionTree e=new ExpressionTree();
e.setRootForTest(tn);
PlanResult planRes=HBaseFilterPlanUtil.getFilterPlan(e,parts);
Assert.assertEquals(2,planRes.plan.getPlans().size());
Assert.assertFalse(planRes.hasUnsupportedCondition);
// ANDing a leaf with the OR subtree still yields two plans.
TreeNode tn2=new TreeNode(l,LogicalOperator.AND,tn);
e=new ExpressionTree();
e.setRootForTest(tn2);
planRes=HBaseFilterPlanUtil.getFilterPlan(e,parts);
Assert.assertEquals(2,planRes.plan.getPlans().size());
Assert.assertFalse(planRes.hasUnsupportedCondition);
// ORing in a LIKE leaf adds a third plan and remains supported.
LeafNode klike=new LeafNode();
klike.keyName=KEY;
klike.value=VAL1;
klike.operator=Operator.LIKE;
TreeNode tn3=new TreeNode(tn2,LogicalOperator.OR,klike);
e=new ExpressionTree();
e.setRootForTest(tn3);
planRes=HBaseFilterPlanUtil.getFilterPlan(e,parts);
Assert.assertEquals(3,planRes.plan.getPlans().size());
Assert.assertFalse(planRes.hasUnsupportedCondition);
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
/**
 * All-string partition keys: leading equality/range predicates are encoded
 * into the scan row-key suffixes, while non-prefix predicates (state) go to
 * the row-filter comparator.
 * Fixes: JUnit {@code assertEquals} arguments were reversed (expected value
 * must come first, or failure messages report the values backwards); raw
 * {@code List} replaced with a parameterized type.
 */
@Test public void testPartitionKeyScannerAllString() throws Exception {
List<FieldSchema> parts=new ArrayList<>();
parts.add(new FieldSchema("year","string",null));
parts.add(new FieldSchema("month","string",null));
parts.add(new FieldSchema("state","string",null));
// year = 2015: the year bound lands in both row suffixes; state does not.
ExpressionTree exprTree=PartFilterExprUtil.getFilterParser("year = 2015 and state = 'CA'").tree;
PlanResult planRes=HBaseFilterPlanUtil.getFilterPlan(exprTree,parts);
Assert.assertEquals(1,planRes.plan.getPlans().size());
ScanPlan sp=planRes.plan.getPlans().get(0);
byte[] startRowSuffix=sp.getStartRowSuffix("testdb","testtb",parts);
byte[] endRowSuffix=sp.getEndRowSuffix("testdb","testtb",parts);
RowFilter filter=(RowFilter)sp.getFilter(parts);
Assert.assertTrue(Bytes.contains(startRowSuffix,"2015".getBytes()));
Assert.assertTrue(Bytes.contains(endRowSuffix,"2015".getBytes()));
Assert.assertFalse(Bytes.contains(startRowSuffix,"CA".getBytes()));
Assert.assertFalse(Bytes.contains(endRowSuffix,"CA".getBytes()));
PartitionKeyComparator comparator=(PartitionKeyComparator)filter.getComparator();
Assert.assertEquals(1,comparator.ranges.size());
Assert.assertEquals("state",comparator.ranges.get(0).keyName);
// Month range folds into the row key; the LIKE on state becomes a filter op.
exprTree=PartFilterExprUtil.getFilterParser("year = 2015 and month > 10 " + "and month <= 11 and state like 'C%'").tree;
planRes=HBaseFilterPlanUtil.getFilterPlan(exprTree,parts);
Assert.assertEquals(1,planRes.plan.getPlans().size());
sp=planRes.plan.getPlans().get(0);
startRowSuffix=sp.getStartRowSuffix("testdb","testtb",parts);
endRowSuffix=sp.getEndRowSuffix("testdb","testtb",parts);
filter=(RowFilter)sp.getFilter(parts);
Assert.assertTrue(Bytes.contains(startRowSuffix,"2015".getBytes()));
Assert.assertTrue(Bytes.contains(endRowSuffix,"2015".getBytes()));
Assert.assertTrue(Bytes.contains(startRowSuffix,"10".getBytes()));
Assert.assertTrue(Bytes.contains(endRowSuffix,"11".getBytes()));
comparator=(PartitionKeyComparator)filter.getComparator();
Assert.assertEquals(1,comparator.ops.size());
Assert.assertEquals("state",comparator.ops.get(0).keyName);
// With only a lower bound on year, the month range moves to the comparator.
exprTree=PartFilterExprUtil.getFilterParser("year >= 2014 and month > 10 " + "and month <= 11 and state like 'C%'").tree;
planRes=HBaseFilterPlanUtil.getFilterPlan(exprTree,parts);
Assert.assertEquals(1,planRes.plan.getPlans().size());
sp=planRes.plan.getPlans().get(0);
startRowSuffix=sp.getStartRowSuffix("testdb","testtb",parts);
endRowSuffix=sp.getEndRowSuffix("testdb","testtb",parts);
filter=(RowFilter)sp.getFilter(parts);
Assert.assertTrue(Bytes.contains(startRowSuffix,"2014".getBytes()));
comparator=(PartitionKeyComparator)filter.getComparator();
Assert.assertEquals(1,comparator.ranges.size());
Assert.assertEquals("month",comparator.ranges.get(0).keyName);
Assert.assertEquals(1,comparator.ops.size());
Assert.assertEquals("state",comparator.ops.get(0).keyName);
// An OR over month splits into two scan plans, one per disjunct.
exprTree=PartFilterExprUtil.getFilterParser("year = 2014 and (month > 10 " + "or month < 3)").tree;
planRes=HBaseFilterPlanUtil.getFilterPlan(exprTree,parts);
sp=planRes.plan.getPlans().get(0);
startRowSuffix=sp.getStartRowSuffix("testdb","testtb",parts);
endRowSuffix=sp.getEndRowSuffix("testdb","testtb",parts);
filter=(RowFilter)sp.getFilter(parts);
Assert.assertTrue(Bytes.contains(startRowSuffix,"2014".getBytes()));
Assert.assertTrue(Bytes.contains(endRowSuffix,"2014".getBytes()));
Assert.assertTrue(Bytes.contains(startRowSuffix,"10".getBytes()));
sp=planRes.plan.getPlans().get(1);
startRowSuffix=sp.getStartRowSuffix("testdb","testtb",parts);
endRowSuffix=sp.getEndRowSuffix("testdb","testtb",parts);
filter=(RowFilter)sp.getFilter(parts);
Assert.assertTrue(Bytes.contains(startRowSuffix,"2014".getBytes()));
Assert.assertTrue(Bytes.contains(endRowSuffix,"2014".getBytes()));
Assert.assertTrue(Bytes.contains(endRowSuffix,"3".getBytes()));
}
InternalCallVerifier EqualityVerifier
/**
* Test ScanPlan AND operation
*/
@Test public void testScanPlanAnd(){
ScanPlan left=new ScanPlan();
ScanPlan right=new ScanPlan();
// Identical start markers: the AND result keeps the shared bound.
left.setStartMarker("a","int","10",INCLUSIVE);
right.setStartMarker("a","int","10",INCLUSIVE);
ScanPlan merged=left.and(right).getPlans().get(0);
Assert.assertEquals(new ScanMarker("10",INCLUSIVE,"int"),merged.markers.get("a").startMarker);
// Identical end markers: both bounds survive the merge.
left.setEndMarker("a","int","20",INCLUSIVE);
right.setEndMarker("a","int","20",INCLUSIVE);
merged=left.and(right).getPlans().get(0);
Assert.assertEquals(new ScanMarker("10",INCLUSIVE,"int"),merged.markers.get("a").startMarker);
Assert.assertEquals(new ScanMarker("20",INCLUSIVE,"int"),merged.markers.get("a").endMarker);
// Overlapping ranges: the merge takes the left plan's (exclusive-10) start
// and the right plan's (15) end.
left.setStartMarker("a","int","10",!INCLUSIVE);
left.setEndMarker("a","int","20",INCLUSIVE);
right.setStartMarker("a","int","10",INCLUSIVE);
right.setEndMarker("a","int","15",INCLUSIVE);
merged=left.and(right).getPlans().get(0);
Assert.assertEquals(left.markers.get("a").startMarker,merged.markers.get("a").startMarker);
Assert.assertEquals(right.markers.get("a").endMarker,merged.markers.get("a").endMarker);
}
Class: org.apache.hadoop.hive.metastore.hbase.TestHBaseImport APIUtilityVerifier IterativeVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Imports only security objects ("-k"): delegation tokens and master keys must
 * be copied into the HBase store, while roles and databases stay untouched.
 */
@Test public void importSecurity() throws Exception {
  RawStore rdbms = new ObjectStore();
  rdbms.setConf(conf);
  String[] dbNames = new String[]{"securitydb1", "securitydb2"};
  String[] roles = new String[]{"securityrole1", "securityrole2"};
  String[] tokenIds = new String[]{"securitytokenid1", "securitytokenid2"};
  String[] tokens = new String[]{"securitytoken1", "securitytoken2"};
  String[] masterKeys = new String[]{"securitymk1", "securitymk2"};
  // Fixed cast precedence: the cast must apply to the quotient.  The original
  // "(int) System.currentTimeMillis() / 1000" narrowed the long millis to int
  // *before* dividing (cast binds tighter than '/'), yielding a meaningless,
  // possibly negative timestamp.
  int now = (int) (System.currentTimeMillis() / 1000);
  setupObjectStore(rdbms, roles, dbNames, tokenIds, tokens, masterKeys, now);
  int baseNumRoles = store.listRoleNames() == null ? 0 : store.listRoleNames().size();
  int baseNumDbs = store.getAllDatabases() == null ? 0 : store.getAllDatabases().size();
  HBaseImport importer = new HBaseImport("-k");
  importer.setConnections(rdbms, store);
  importer.run();
  // Roles and databases must not have been imported by "-k".
  Assert.assertEquals(baseNumRoles, store.listRoleNames().size());
  Assert.assertEquals(baseNumDbs, store.getAllDatabases().size());
  // Every delegation token must have been copied verbatim.
  for (int i = 0; i < tokenIds.length; i++) {
    Assert.assertEquals(tokens[i], store.getToken(tokenIds[i]));
  }
  // Every master key must be present (order of keys is not asserted).
  String[] hbaseKeys = store.getMasterKeys();
  Set<String> keys = new HashSet<>(Arrays.asList(hbaseKeys));
  for (int i = 0; i < masterKeys.length; i++) {
    Assert.assertTrue(keys.contains(masterKeys[i]));
  }
}
EqualityVerifier NullVerifier HybridVerifier
/**
 * Imports a single role ("-r"): only that role is copied; databases, tokens,
 * master keys, and the second role must be left untouched.
 */
@Test public void importOneRole() throws Exception {
  RawStore rdbms = new ObjectStore();
  rdbms.setConf(conf);
  String[] dbNames = new String[]{"oneroledb1", "oneroledb2"};
  String[] roles = new String[]{"onerolerole1", "onerolerole2"};
  String[] tokenIds = new String[]{"oneroletokenid1", "oneroletokenid2"};
  String[] tokens = new String[]{"oneroletoken1", "oneroletoken2"};
  String[] masterKeys = new String[]{"onerolemk1", "onerolemk2"};
  // Fixed cast precedence: divide the long millis first, then narrow to int.
  // The original cast truncated millis to int before dividing.
  int now = (int) (System.currentTimeMillis() / 1000);
  setupObjectStore(rdbms, roles, dbNames, tokenIds, tokens, masterKeys, now);
  int baseNumRoles = store.listRoleNames() == null ? 0 : store.listRoleNames().size();
  int baseNumDbs = store.getAllDatabases() == null ? 0 : store.getAllDatabases().size();
  int baseNumToks = store.getAllTokenIdentifiers() == null ? 0 : store.getAllTokenIdentifiers().size();
  int baseNumKeys = store.getMasterKeys() == null ? 0 : store.getMasterKeys().length;
  HBaseImport importer = new HBaseImport("-r", roles[0]);
  importer.setConnections(rdbms, store);
  importer.run();
  Role role = store.getRole(roles[0]);
  Assert.assertNotNull(role);
  Assert.assertEquals(roles[0], role.getRoleName());
  // Exactly one role added; nothing else changed.
  Assert.assertEquals(baseNumRoles + 1, store.listRoleNames().size());
  Assert.assertEquals(baseNumDbs, store.getAllDatabases().size());
  Assert.assertEquals(baseNumToks, store.getAllTokenIdentifiers().size());
  String[] hbaseKeys = store.getMasterKeys();
  Assert.assertEquals(baseNumKeys, hbaseKeys.length);
  // The second role must not exist in the target store.
  thrown.expect(NoSuchObjectException.class);
  store.getRole(roles[1]);
}
APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Imports everything ("-a"): roles, databases, tables, partitions, functions,
 * delegation tokens, and master keys must all arrive in the HBase store.
 */
@Test public void importAll() throws Exception {
  RawStore rdbms = new ObjectStore();
  rdbms.setConf(conf);
  String[] dbNames = new String[]{"alldb1", "alldb2"};
  String[] roles = new String[]{"allrole1", "allrole2"};
  String[] tokenIds = new String[]{"alltokenid1", "alltokenid2"};
  String[] tokens = new String[]{"alltoken1", "alltoken2"};
  String[] masterKeys = new String[]{"allmk1", "allmk2"};
  // Fixed cast precedence: divide the long millis first, then narrow to int.
  // The original cast truncated millis to int before dividing.
  int now = (int) (System.currentTimeMillis() / 1000);
  setupObjectStore(rdbms, roles, dbNames, tokenIds, tokens, masterKeys, now);
  int baseNumRoles = store.listRoleNames() == null ? 0 : store.listRoleNames().size();
  int baseNumDbs = store.getAllDatabases() == null ? 0 : store.getAllDatabases().size();
  HBaseImport importer = new HBaseImport("-a");
  importer.setConnections(rdbms, store);
  importer.run();
  // Both roles imported by name.
  for (int i = 0; i < roles.length; i++) {
    Role role = store.getRole(roles[i]);
    Assert.assertNotNull(role);
    Assert.assertEquals(roles[i], role.getRoleName());
  }
  Assert.assertEquals(baseNumRoles + 2, store.listRoleNames().size());
  // Both databases with all of their tables, partitions, and functions.
  for (int i = 0; i < dbNames.length; i++) {
    Database db = store.getDatabase(dbNames[i]);
    Assert.assertNotNull(db);
    Assert.assertEquals("file:/tmp", db.getLocationUri());
    Table table = store.getTable(db.getName(), tableNames[0]);
    Assert.assertNotNull(table);
    Assert.assertEquals(now, table.getLastAccessTime());
    Assert.assertEquals("input", table.getSd().getInputFormat());
    table = store.getTable(db.getName(), tableNames[1]);
    Assert.assertNotNull(table);
    for (int j = 0; j < partVals.length; j++) {
      Partition part = store.getPartition(dbNames[i], tableNames[1], Arrays.asList(partVals[j]));
      Assert.assertNotNull(part);
      Assert.assertEquals("file:/tmp/region=" + partVals[j], part.getSd().getLocation());
    }
    Assert.assertEquals(4, store.getPartitions(dbNames[i], tableNames[1], -1).size());
    Assert.assertEquals(2, store.getAllTables(dbNames[i]).size());
    Assert.assertEquals(2, store.getFunctions(dbNames[i], "*").size());
    for (int j = 0; j < funcNames.length; j++) {
      Assert.assertNotNull(store.getFunction(dbNames[i], funcNames[j]));
    }
  }
  Assert.assertEquals(baseNumDbs + 2, store.getAllDatabases().size());
  // All tokens copied verbatim.
  for (int i = 0; i < tokenIds.length; i++) {
    Assert.assertEquals(tokens[i], store.getToken(tokenIds[i]));
  }
  // All master keys present (order not asserted).
  String[] hbaseKeys = store.getMasterKeys();
  Set<String> keys = new HashSet<>(Arrays.asList(hbaseKeys));
  for (int i = 0; i < masterKeys.length; i++) {
    Assert.assertTrue(keys.contains(masterKeys[i]));
  }
}
EqualityVerifier NullVerifier HybridVerifier
/**
 * Imports a single function ("-f db.func"): only that function (and its
 * pre-created database) is visible; tables, tokens, and keys are unchanged.
 */
@Test public void importOneFunc() throws Exception {
  RawStore rdbms = new ObjectStore();
  rdbms.setConf(conf);
  String[] dbNames = new String[]{"onefuncdb1", "onefuncdb2"};
  String[] roles = new String[]{"onefuncrole1", "onefuncrole2"};
  String[] tokenIds = new String[]{"onefunctokenid1", "onefunctokenid2"};
  String[] tokens = new String[]{"onefunctoken1", "onefunctoken2"};
  String[] masterKeys = new String[]{"onefuncmk1", "onefuncmk2"};
  // Fixed cast precedence: divide the long millis first, then narrow to int.
  // The original cast truncated millis to int before dividing.
  int now = (int) (System.currentTimeMillis() / 1000);
  setupObjectStore(rdbms, roles, dbNames, tokenIds, tokens, masterKeys, now);
  int baseNumRoles = store.listRoleNames() == null ? 0 : store.listRoleNames().size();
  int baseNumDbs = store.getAllDatabases() == null ? 0 : store.getAllDatabases().size();
  int baseNumToks = store.getAllTokenIdentifiers() == null ? 0 : store.getAllTokenIdentifiers().size();
  int baseNumKeys = store.getMasterKeys() == null ? 0 : store.getMasterKeys().length;
  // The target database must exist before a single function can be imported.
  store.createDatabase(new Database(dbNames[0], "no description", "file:/tmp", emptyParameters));
  HBaseImport importer = new HBaseImport("-f", dbNames[0] + "." + funcNames[0]);
  importer.setConnections(rdbms, store);
  importer.run();
  Assert.assertEquals(baseNumRoles, store.listRoleNames().size());
  Database db = store.getDatabase(dbNames[0]);
  Assert.assertNotNull(db);
  // Only the one requested function was imported; no tables came along.
  Assert.assertEquals(0, store.getAllTables(dbNames[0]).size());
  Assert.assertEquals(1, store.getFunctions(dbNames[0], "*").size());
  Assert.assertNotNull(store.getFunction(dbNames[0], funcNames[0]));
  Assert.assertNull(store.getFunction(dbNames[0], funcNames[1]));
  Assert.assertEquals(baseNumDbs + 1, store.getAllDatabases().size());
  Assert.assertEquals(baseNumToks, store.getAllTokenIdentifiers().size());
  String[] hbaseKeys = store.getMasterKeys();
  Assert.assertEquals(baseNumKeys, hbaseKeys.length);
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Imports a single non-partitioned table ("-t db.tbl"): only that table is
 * visible; the second table, functions, tokens, and keys are unchanged.
 */
@Test public void importOneTableNonPartitioned() throws Exception {
  RawStore rdbms = new ObjectStore();
  rdbms.setConf(conf);
  String[] dbNames = new String[]{"onetabdb1", "onetabdb2"};
  String[] roles = new String[]{"onetabrole1", "onetabrole2"};
  String[] tokenIds = new String[]{"onetabtokenid1", "onetabtokenid2"};
  String[] tokens = new String[]{"onetabtoken1", "onetabtoken2"};
  String[] masterKeys = new String[]{"onetabmk1", "onetabmk2"};
  // Fixed cast precedence: divide the long millis first, then narrow to int.
  // The original cast truncated millis to int before dividing.
  int now = (int) (System.currentTimeMillis() / 1000);
  setupObjectStore(rdbms, roles, dbNames, tokenIds, tokens, masterKeys, now);
  int baseNumRoles = store.listRoleNames() == null ? 0 : store.listRoleNames().size();
  int baseNumDbs = store.getAllDatabases() == null ? 0 : store.getAllDatabases().size();
  int baseNumToks = store.getAllTokenIdentifiers() == null ? 0 : store.getAllTokenIdentifiers().size();
  int baseNumKeys = store.getMasterKeys() == null ? 0 : store.getMasterKeys().length;
  // The target database must exist before a single table can be imported.
  store.createDatabase(new Database(dbNames[0], "no description", "file:/tmp", emptyParameters));
  HBaseImport importer = new HBaseImport("-t", dbNames[0] + "." + tableNames[0]);
  importer.setConnections(rdbms, store);
  importer.run();
  Assert.assertEquals(baseNumRoles, store.listRoleNames().size());
  Database db = store.getDatabase(dbNames[0]);
  Assert.assertNotNull(db);
  Table table = store.getTable(db.getName(), tableNames[0]);
  Assert.assertNotNull(table);
  // Only the one requested table was imported.
  Assert.assertEquals(1, store.getAllTables(db.getName()).size());
  Assert.assertNull(store.getTable(db.getName(), tableNames[1]));
  Assert.assertEquals(0, store.getFunctions(dbNames[0], "*").size());
  Assert.assertEquals(baseNumDbs + 1, store.getAllDatabases().size());
  Assert.assertEquals(baseNumToks, store.getAllTokenIdentifiers().size());
  String[] hbaseKeys = store.getMasterKeys();
  Assert.assertEquals(baseNumKeys, hbaseKeys.length);
}
IterativeVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Imports one database using parallel scanners ("-p 2 -b 2") with an even
 * table/partition count, and verifies every table and partition arrives.
 */
@Test public void parallel() throws Exception {
  int parallelFactor = 10;
  RawStore rdbms = new ObjectStore();
  rdbms.setConf(conf);
  String[] dbNames = new String[]{"paralleldb1"};
  // Fixed cast precedence: divide the long millis first, then narrow to int.
  // The original cast truncated millis to int before dividing.
  int now = (int) (System.currentTimeMillis() / 1000);
  for (int i = 0; i < dbNames.length; i++) {
    rdbms.createDatabase(new Database(dbNames[i], "no description", "file:/tmp", emptyParameters));
    // Typed the previously-raw lists.
    List<FieldSchema> cols = new ArrayList<>();
    cols.add(new FieldSchema("col1", "int", "nocomment"));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0,
        serde, null, null, emptyParameters);
    List<FieldSchema> partCols = new ArrayList<>();
    partCols.add(new FieldSchema("region", "string", ""));
    // parallelFactor tables, each with parallelFactor partitions.
    for (int j = 0; j < parallelFactor; j++) {
      rdbms.createTable(new Table("t" + j, dbNames[i], "me", now, now, 0, sd, partCols,
          emptyParameters, null, null, null));
      for (int k = 0; k < parallelFactor; k++) {
        StorageDescriptor psd = new StorageDescriptor(sd);
        psd.setLocation("file:/tmp/region=" + k);
        Partition part = new Partition(Arrays.asList("p" + k), dbNames[i], "t" + j, now, now,
            psd, emptyParameters);
        rdbms.addPartition(part);
      }
    }
  }
  HBaseImport importer = new HBaseImport("-p", "2", "-b", "2", "-d", dbNames[0]);
  importer.setConnections(rdbms, store);
  importer.run();
  // Everything created above must be present in the target store.
  for (int i = 0; i < dbNames.length; i++) {
    Database db = store.getDatabase(dbNames[i]);
    Assert.assertNotNull(db);
    for (int j = 0; j < parallelFactor; j++) {
      Table table = store.getTable(db.getName(), "t" + j);
      Assert.assertNotNull(table);
      Assert.assertEquals(now, table.getLastAccessTime());
      Assert.assertEquals("input", table.getSd().getInputFormat());
      for (int k = 0; k < parallelFactor; k++) {
        Partition part = store.getPartition(dbNames[i], "t" + j, Arrays.asList("p" + k));
        Assert.assertNotNull(part);
        Assert.assertEquals("file:/tmp/region=" + k, part.getSd().getLocation());
      }
      Assert.assertEquals(parallelFactor, store.getPartitions(dbNames[i], "t" + j, -1).size());
    }
    Assert.assertEquals(parallelFactor, store.getAllTables(dbNames[i]).size());
  }
}
IterativeVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Imports a single partitioned table ("-t db.tbl"): the table plus all of its
 * partitions are copied; the other table, functions, tokens, and keys are not.
 */
@Test public void importOneTablePartitioned() throws Exception {
  RawStore rdbms = new ObjectStore();
  rdbms.setConf(conf);
  String[] dbNames = new String[]{"onetabpartdb1", "onetabpartodb2"};
  String[] roles = new String[]{"onetabpartorole1", "onetabpartorole2"};
  String[] tokenIds = new String[]{"onetabpartotokenid1", "onetabpartotokenid2"};
  String[] tokens = new String[]{"onetabpartotoken1", "onetabpartotoken2"};
  String[] masterKeys = new String[]{"onetabpartomk1", "onetabpartomk2"};
  // Fixed cast precedence: divide the long millis first, then narrow to int.
  // The original cast truncated millis to int before dividing.
  int now = (int) (System.currentTimeMillis() / 1000);
  setupObjectStore(rdbms, roles, dbNames, tokenIds, tokens, masterKeys, now);
  int baseNumRoles = store.listRoleNames() == null ? 0 : store.listRoleNames().size();
  int baseNumDbs = store.getAllDatabases() == null ? 0 : store.getAllDatabases().size();
  int baseNumToks = store.getAllTokenIdentifiers() == null ? 0 : store.getAllTokenIdentifiers().size();
  int baseNumKeys = store.getMasterKeys() == null ? 0 : store.getMasterKeys().length;
  // The target database must exist before a single table can be imported.
  store.createDatabase(new Database(dbNames[0], "no description", "file:/tmp", emptyParameters));
  HBaseImport importer = new HBaseImport("-t", dbNames[0] + "." + tableNames[1]);
  importer.setConnections(rdbms, store);
  importer.run();
  Assert.assertEquals(baseNumRoles, store.listRoleNames().size());
  Database db = store.getDatabase(dbNames[0]);
  Assert.assertNotNull(db);
  Table table = store.getTable(db.getName(), tableNames[1]);
  Assert.assertNotNull(table);
  Assert.assertEquals(1, store.getAllTables(db.getName()).size());
  // All partitions of the imported table came with it.
  for (int j = 0; j < partVals.length; j++) {
    Partition part = store.getPartition(dbNames[0], tableNames[1], Arrays.asList(partVals[j]));
    Assert.assertNotNull(part);
    Assert.assertEquals("file:/tmp/region=" + partVals[j], part.getSd().getLocation());
  }
  Assert.assertEquals(4, store.getPartitions(dbNames[0], tableNames[1], -1).size());
  // Nothing else was imported.
  Assert.assertNull(store.getTable(db.getName(), tableNames[0]));
  Assert.assertEquals(0, store.getFunctions(dbNames[0], "*").size());
  Assert.assertEquals(baseNumDbs + 1, store.getAllDatabases().size());
  Assert.assertEquals(baseNumToks, store.getAllTokenIdentifiers().size());
  String[] hbaseKeys = store.getMasterKeys();
  Assert.assertEquals(baseNumKeys, hbaseKeys.length);
}
IterativeVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Imports one database ("-d"): all of its tables, partitions, and functions
 * are copied; the second database, roles, tokens, and keys are untouched.
 */
@Test public void importOneDb() throws Exception {
  RawStore rdbms = new ObjectStore();
  rdbms.setConf(conf);
  String[] dbNames = new String[]{"onedbdb1", "onedbdb2"};
  String[] roles = new String[]{"onedbrole1", "onedbrole2"};
  String[] tokenIds = new String[]{"onedbtokenid1", "onedbtokenid2"};
  String[] tokens = new String[]{"onedbtoken1", "onedbtoken2"};
  String[] masterKeys = new String[]{"onedbmk1", "onedbmk2"};
  // Fixed cast precedence: divide the long millis first, then narrow to int.
  // The original cast truncated millis to int before dividing.
  int now = (int) (System.currentTimeMillis() / 1000);
  setupObjectStore(rdbms, roles, dbNames, tokenIds, tokens, masterKeys, now);
  int baseNumRoles = store.listRoleNames() == null ? 0 : store.listRoleNames().size();
  int baseNumDbs = store.getAllDatabases() == null ? 0 : store.getAllDatabases().size();
  int baseNumToks = store.getAllTokenIdentifiers() == null ? 0 : store.getAllTokenIdentifiers().size();
  int baseNumKeys = store.getMasterKeys() == null ? 0 : store.getMasterKeys().length;
  HBaseImport importer = new HBaseImport("-d", dbNames[0]);
  importer.setConnections(rdbms, store);
  importer.run();
  Assert.assertEquals(baseNumRoles, store.listRoleNames().size());
  Database db = store.getDatabase(dbNames[0]);
  Assert.assertNotNull(db);
  Assert.assertEquals("file:/tmp", db.getLocationUri());
  Table table = store.getTable(db.getName(), tableNames[0]);
  Assert.assertNotNull(table);
  Assert.assertEquals(now, table.getLastAccessTime());
  Assert.assertEquals("input", table.getSd().getInputFormat());
  table = store.getTable(db.getName(), tableNames[1]);
  Assert.assertNotNull(table);
  for (int j = 0; j < partVals.length; j++) {
    Partition part = store.getPartition(dbNames[0], tableNames[1], Arrays.asList(partVals[j]));
    Assert.assertNotNull(part);
    Assert.assertEquals("file:/tmp/region=" + partVals[j], part.getSd().getLocation());
  }
  Assert.assertEquals(4, store.getPartitions(dbNames[0], tableNames[1], -1).size());
  Assert.assertEquals(2, store.getAllTables(dbNames[0]).size());
  Assert.assertEquals(2, store.getFunctions(dbNames[0], "*").size());
  for (int j = 0; j < funcNames.length; j++) {
    Assert.assertNotNull(store.getFunction(dbNames[0], funcNames[j]));
  }
  Assert.assertEquals(baseNumDbs + 1, store.getAllDatabases().size());
  Assert.assertEquals(baseNumToks, store.getAllTokenIdentifiers().size());
  String[] hbaseKeys = store.getMasterKeys();
  Assert.assertEquals(baseNumKeys, hbaseKeys.length);
  // The second database must not have been imported.
  thrown.expect(NoSuchObjectException.class);
  store.getDatabase(dbNames[1]);
}
IterativeVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Same as {@code parallel()} but with an odd table/partition count (9), so
 * the parallel batching ("-p 2 -b 2") exercises an uneven final batch.
 */
@Test public void parallelOdd() throws Exception {
  int parallelFactor = 9;
  RawStore rdbms = new ObjectStore();
  rdbms.setConf(conf);
  String[] dbNames = new String[]{"oddparalleldb1"};
  // Fixed cast precedence: divide the long millis first, then narrow to int.
  // The original cast truncated millis to int before dividing.
  int now = (int) (System.currentTimeMillis() / 1000);
  for (int i = 0; i < dbNames.length; i++) {
    rdbms.createDatabase(new Database(dbNames[i], "no description", "file:/tmp", emptyParameters));
    // Typed the previously-raw lists.
    List<FieldSchema> cols = new ArrayList<>();
    cols.add(new FieldSchema("col1", "int", "nocomment"));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0,
        serde, null, null, emptyParameters);
    List<FieldSchema> partCols = new ArrayList<>();
    partCols.add(new FieldSchema("region", "string", ""));
    for (int j = 0; j < parallelFactor; j++) {
      rdbms.createTable(new Table("t" + j, dbNames[i], "me", now, now, 0, sd, partCols,
          emptyParameters, null, null, null));
      for (int k = 0; k < parallelFactor; k++) {
        StorageDescriptor psd = new StorageDescriptor(sd);
        psd.setLocation("file:/tmp/region=" + k);
        Partition part = new Partition(Arrays.asList("p" + k), dbNames[i], "t" + j, now, now,
            psd, emptyParameters);
        rdbms.addPartition(part);
      }
    }
  }
  HBaseImport importer = new HBaseImport("-p", "2", "-b", "2", "-d", dbNames[0]);
  importer.setConnections(rdbms, store);
  importer.run();
  // Everything created above must be present in the target store.
  for (int i = 0; i < dbNames.length; i++) {
    Database db = store.getDatabase(dbNames[i]);
    Assert.assertNotNull(db);
    for (int j = 0; j < parallelFactor; j++) {
      Table table = store.getTable(db.getName(), "t" + j);
      Assert.assertNotNull(table);
      Assert.assertEquals(now, table.getLastAccessTime());
      Assert.assertEquals("input", table.getSd().getInputFormat());
      for (int k = 0; k < parallelFactor; k++) {
        Partition part = store.getPartition(dbNames[i], "t" + j, Arrays.asList("p" + k));
        Assert.assertNotNull(part);
        Assert.assertEquals("file:/tmp/region=" + k, part.getSd().getLocation());
      }
      Assert.assertEquals(parallelFactor, store.getPartitions(dbNames[i], "t" + j, -1).size());
    }
    Assert.assertEquals(parallelFactor, store.getAllTables(dbNames[i]).size());
  }
}
Class: org.apache.hadoop.hive.metastore.hbase.TestHBaseMetastoreSql EqualityVerifier
/**
 * Renaming a partitioned table must carry its partitions to the new name and
 * make the old name unresolvable.
 */
@Test public void alterRenamePartitioned() throws Exception {
  driver.run("create table alterrename (c int) partitioned by (ds string)");
  driver.run("alter table alterrename add partition (ds = 'a')");
  CommandProcessorResponse response =
      driver.run("describe extended alterrename partition (ds='a')");
  Assert.assertEquals(0, response.getResponseCode());
  response = driver.run("alter table alterrename rename to alter_renamed");
  Assert.assertEquals(0, response.getResponseCode());
  // The partition must be reachable under the new table name...
  response = driver.run("describe extended alter_renamed partition (ds='a')");
  Assert.assertEquals(0, response.getResponseCode());
  // ...and the old name must now fail (error code asserted as 10001).
  response = driver.run("describe extended alterrename partition (ds='a')");
  Assert.assertEquals(10001, response.getResponseCode());
}
EqualityVerifier
/**
 * Basic table lifecycle: insert, select, alter properties, and drop must all
 * succeed against the HBase-backed metastore.
 */
@Test public void table() throws Exception {
  driver.run("create table tbl (c int)");
  String[] statements = {
      "insert into table tbl values (3)",
      "select * from tbl",
      "alter table tbl set tblproperties ('example', 'true')",
      "drop table tbl"
  };
  for (String statement : statements) {
    CommandProcessorResponse response = driver.run(statement);
    Assert.assertEquals(0, response.getResponseCode());
  }
}
EqualityVerifier
/**
 * A dynamic-partition insert into a partitioned table must succeed.
 */
@Test public void insertIntoPartitionTable() throws Exception {
  driver.run("create table iipt (c int) partitioned by (ds string)");
  CommandProcessorResponse response = driver.run(
      "insert into table iipt partition(ds) values (1, 'today'), (2, 'yesterday'),"
          + "(3, 'tomorrow')");
  Assert.assertEquals(0, response.getResponseCode());
}
EqualityVerifier
/**
 * Renaming a non-partitioned table: the new name must resolve and the old
 * name must fail afterwards.
 */
@Test public void alterRename() throws Exception {
  driver.run("create table alterrename1 (c int)");
  CommandProcessorResponse response = driver.run("describe alterrename1");
  Assert.assertEquals(0, response.getResponseCode());
  response = driver.run("alter table alterrename1 rename to alter_renamed1");
  Assert.assertEquals(0, response.getResponseCode());
  response = driver.run("describe alter_renamed1");
  Assert.assertEquals(0, response.getResponseCode());
  // The old name must no longer resolve (error code asserted as 10001).
  response = driver.run("describe alterrename1");
  Assert.assertEquals(10001, response.getResponseCode());
}
EqualityVerifier
/**
 * Full role lifecycle: create, grant (to user and role), show, revoke, and
 * drop must all succeed.  Every statement is expected to return 0.
 */
@Test public void role() throws Exception {
  String[] statements = {
      "set role admin",
      "create role role1",
      "grant role1 to user fred with admin option",
      "create role role2",
      "grant role1 to role role2",
      "show principals role1",
      "show role grant role role1",
      "show role grant user " + System.getProperty("user.name"),
      "show roles",
      "revoke admin option for role1 from user fred",
      "revoke role1 from user fred",
      "revoke role1 from role role2",
      "show current roles",
      "drop role role2",
      "drop role role1"
  };
  // Run the statements in order; each must complete successfully.
  for (String statement : statements) {
    CommandProcessorResponse response = driver.run(statement);
    Assert.assertEquals(0, response.getResponseCode());
  }
}
InternalCallVerifier EqualityVerifier
/**
 * Grants and revokes a table privilege to a user and to a role.
 */
@Test public void grant() throws Exception {
  CommandProcessorResponse rsp = driver.run("set role admin");
  Assert.assertEquals(0, rsp.getResponseCode());
  rsp = driver.run("create role role3");
  Assert.assertEquals(0, rsp.getResponseCode());
  // Fixed: the results of the following five commands were never assigned to
  // rsp, so each assertion re-checked the stale "create role" response and
  // the grant/revoke statements went unverified.
  rsp = driver.run("create table granttbl (c int)");
  Assert.assertEquals(0, rsp.getResponseCode());
  rsp = driver.run("grant select on granttbl to " + System.getProperty("user.name"));
  Assert.assertEquals(0, rsp.getResponseCode());
  rsp = driver.run("grant select on granttbl to role3 with grant option");
  Assert.assertEquals(0, rsp.getResponseCode());
  rsp = driver.run("revoke select on granttbl from " + System.getProperty("user.name"));
  Assert.assertEquals(0, rsp.getResponseCode());
  rsp = driver.run("revoke grant option for select on granttbl from role3");
  Assert.assertEquals(0, rsp.getResponseCode());
}
EqualityVerifier
/**
 * A simple insert into a non-partitioned table must succeed.
 */
@Test public void insertIntoTable() throws Exception {
  driver.run("create table iit (c int)");
  CommandProcessorResponse response = driver.run("insert into table iit values (3)");
  Assert.assertEquals(0, response.getResponseCode());
}
EqualityVerifier
/**
 * Database lifecycle: create, set properties (as admin), and drop must all
 * succeed.  Every statement is expected to return 0.
 */
@Test public void database() throws Exception {
  String[] statements = {
      "create database db",
      "set role admin",
      "alter database db set dbproperties ('key' = 'value')",
      "drop database db"
  };
  for (String statement : statements) {
    CommandProcessorResponse response = driver.run(statement);
    Assert.assertEquals(0, response.getResponseCode());
  }
}
EqualityVerifier
/**
 * Describing a non-partitioned table must work both before and after altering
 * its serde properties.  Every statement is expected to return 0.
 */
@Test public void describeNonpartitionedTable() throws Exception {
  String[] statements = {
      "create table alter1(a int, b int)",
      "describe extended alter1",
      "alter table alter1 set serdeproperties('s1'='9')",
      "describe extended alter1"
  };
  for (String statement : statements) {
    CommandProcessorResponse response = driver.run(statement);
    Assert.assertEquals(0, response.getResponseCode());
  }
}
EqualityVerifier
/**
 * Exercises a partitioned table end to end: dynamic and static partition
 * inserts, add/touch partition, and full and filtered selects.
 */
@Test public void partitionedTable() throws Exception {
  driver.run("create table parttbl (c int) partitioned by (ds string)");
  String[] statements = {
      "insert into table parttbl partition(ds) values (1, 'today'), (2, 'yesterday')"
          + ", (3, 'tomorrow')",
      "insert into table parttbl partition(ds) values (4, 'today'), (5, 'yesterday')"
          + ", (6, 'tomorrow')",
      "insert into table parttbl partition(ds = 'someday') values (1)",
      "insert into table parttbl partition(ds = 'someday') values (2)",
      "alter table parttbl add partition (ds = 'whenever')",
      "insert into table parttbl partition(ds = 'whenever') values (2)",
      "alter table parttbl touch partition (ds = 'whenever')",
      "select * from parttbl",
      "select * from parttbl where ds = 'today'"
  };
  for (String statement : statements) {
    CommandProcessorResponse response = driver.run(statement);
    Assert.assertEquals(0, response.getResponseCode());
  }
}
Class: org.apache.hadoop.hive.metastore.hbase.TestHBaseSchemaTool EqualityVerifier
/**
 * Looking up a nonexistent partition must print a "no such" message on stdout.
 */
@Test public void noSuchPart() throws Exception {
  ByteArrayOutputStream capturedOut = new ByteArrayOutputStream();
  ByteArrayOutputStream capturedErr = new ByteArrayOutputStream();
  PrintStream out = new PrintStream(capturedOut);
  PrintStream err = new PrintStream(capturedErr);
  new HBaseSchemaTool().go(false, HBaseReadWrite.PART_TABLE, "nosuch", null, conf, out, err);
  Assert.assertEquals("No such partition: nosuch" + lsep, capturedOut.toString());
}
EqualityVerifier
/**
 * A function-name regex that matches nothing must print a "no matching"
 * message on stdout.
 */
@Test public void noMatchingFunction() throws Exception {
  ByteArrayOutputStream capturedOut = new ByteArrayOutputStream();
  ByteArrayOutputStream capturedErr = new ByteArrayOutputStream();
  PrintStream out = new PrintStream(capturedOut);
  PrintStream err = new PrintStream(capturedErr);
  new HBaseSchemaTool().go(false, HBaseReadWrite.FUNC_TABLE, null, "nomatch", conf, out, err);
  Assert.assertEquals("No matching function: nomatch" + lsep, capturedOut.toString());
}
EqualityVerifier
/**
 * A database-name regex that matches nothing must print a "no matching"
 * message on stdout.
 */
@Test public void noMatchingDb() throws Exception {
  ByteArrayOutputStream capturedOut = new ByteArrayOutputStream();
  ByteArrayOutputStream capturedErr = new ByteArrayOutputStream();
  PrintStream out = new PrintStream(capturedOut);
  PrintStream err = new PrintStream(capturedErr);
  new HBaseSchemaTool().go(false, HBaseReadWrite.DB_TABLE, null, "nomatch", conf, out, err);
  Assert.assertEquals("No matching database: nomatch" + lsep, capturedOut.toString());
}
EqualityVerifier
/**
 * Looking up a nonexistent role must print a "no such" message on stdout.
 */
@Test public void noSuchRole() throws Exception {
  ByteArrayOutputStream capturedOut = new ByteArrayOutputStream();
  ByteArrayOutputStream capturedErr = new ByteArrayOutputStream();
  PrintStream out = new PrintStream(capturedOut);
  PrintStream err = new PrintStream(capturedErr);
  new HBaseSchemaTool().go(false, HBaseReadWrite.ROLE_TABLE, "nosuch", null, conf, out, err);
  Assert.assertEquals("No such role: nosuch" + lsep, capturedOut.toString());
}
EqualityVerifier
/**
 * An unknown HBase table name must produce an error on stderr (not stdout).
 */
@Test public void bogusTable() throws Exception {
  ByteArrayOutputStream capturedOut = new ByteArrayOutputStream();
  ByteArrayOutputStream capturedErr = new ByteArrayOutputStream();
  PrintStream out = new PrintStream(capturedOut);
  PrintStream err = new PrintStream(capturedErr);
  new HBaseSchemaTool().go(false, "nosuch", null, null, conf, out, err);
  Assert.assertEquals("Unknown table: nosuch" + lsep, capturedErr.toString());
}
EqualityVerifier
/**
 * A partition regex that matches nothing must print a "no matching" message
 * on stdout.
 */
@Test public void noMatchingPart() throws Exception {
  ByteArrayOutputStream capturedOut = new ByteArrayOutputStream();
  ByteArrayOutputStream capturedErr = new ByteArrayOutputStream();
  PrintStream out = new PrintStream(capturedOut);
  PrintStream err = new PrintStream(capturedErr);
  new HBaseSchemaTool().go(false, HBaseReadWrite.PART_TABLE, null, "nomatch", conf, out, err);
  Assert.assertEquals("No matching partition: nomatch" + lsep, capturedOut.toString());
}
EqualityVerifier
/**
 * Looking up a nonexistent function must print a "no such" message on stdout.
 */
@Test public void noSuchFunction() throws Exception {
  ByteArrayOutputStream capturedOut = new ByteArrayOutputStream();
  ByteArrayOutputStream capturedErr = new ByteArrayOutputStream();
  PrintStream out = new PrintStream(capturedOut);
  PrintStream err = new PrintStream(capturedErr);
  new HBaseSchemaTool().go(false, HBaseReadWrite.FUNC_TABLE, "nosuch", null, conf, out, err);
  Assert.assertEquals("No such function: nosuch" + lsep, capturedOut.toString());
}
EqualityVerifier
/**
 * Looking up a nonexistent user in the user-to-role table must print a
 * "no such" message on stdout.
 */
@Test public void noSuchUser() throws Exception {
  ByteArrayOutputStream capturedOut = new ByteArrayOutputStream();
  ByteArrayOutputStream capturedErr = new ByteArrayOutputStream();
  PrintStream out = new PrintStream(capturedOut);
  PrintStream err = new PrintStream(capturedErr);
  new HBaseSchemaTool().go(false, HBaseReadWrite.USER_TO_ROLE_TABLE, "nosuch", null, conf, out, err);
  Assert.assertEquals("No such user: nosuch" + lsep, capturedOut.toString());
}
EqualityVerifier
/**
 * Looking up a nonexistent storage descriptor must echo the requested key in
 * the "no such" message.
 */
@Test public void noSuchStorageDescriptor() throws Exception {
  ByteArrayOutputStream outStr = new ByteArrayOutputStream();
  PrintStream out = new PrintStream(outStr);
  ByteArrayOutputStream errStr = new ByteArrayOutputStream();
  PrintStream err = new PrintStream(errStr);
  new HBaseSchemaTool().go(false, HBaseReadWrite.SD_TABLE, "nosuch", null, conf, out, err);
  // Fixed expected message: the tool is asked for "nosuch", so the echoed key
  // should be "nosuch" — the original asserted the typo "nosucg".
  // NOTE(review): if this test was previously green, the typo lives in the
  // tool's message instead of the test — confirm against HBaseSchemaTool.
  Assert.assertEquals("No such storage descriptor: nosuch" + lsep, outStr.toString());
}
EqualityVerifier
/**
 * End-to-end sweep of HBaseSchemaTool's dump output over every metastore
 * table it understands.  The store is seeded incrementally (databases,
 * roles, role grants, functions, global privileges, tables + table stats,
 * partitions + partition stats, security entries) and after each step the
 * exact printed text is asserted, for both key lookups and regex listings.
 * Steps are strictly order-dependent: later assertions rely on objects
 * created earlier, so nothing here may be reordered.
 */
@Test public void oneMondoTest() throws Exception {
// Single tool instance reused throughout; outStr/out are re-created before
// each dump so every assertion sees only its own output.  err is never
// re-created because none of these calls is expected to write to it.
HBaseSchemaTool tool=new HBaseSchemaTool();
ByteArrayOutputStream outStr=new ByteArrayOutputStream();
PrintStream out=new PrintStream(outStr);
ByteArrayOutputStream errStr=new ByteArrayOutputStream();
PrintStream err=new PrintStream(errStr);
// Empty store: storage-descriptor and sequence dumps report nothing yet.
tool.go(false,HBaseReadWrite.SD_TABLE,null,"whatever",conf,out,err);
Assert.assertEquals("No storage descriptors" + lsep,outStr.toString());
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.SEQUENCES_TABLE,null,"whatever",conf,out,err);
Assert.assertEquals("No sequences" + lsep,outStr.toString());
// Seed three databases db0..db2.
String[] dbNames=new String[3];
for (int i=0; i < dbNames.length; i++) {
dbNames[i]="db" + i;
Database db=new Database(dbNames[i],"no description","file:///tmp",emptyParameters);
store.createDatabase(db);
}
// Database dumps: exact key, match-all regex, then a subset regex.
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.DB_TABLE,"db0",null,conf,out,err);
Assert.assertEquals("{\"name\":\"db0\",\"description\":\"no description\"," + "\"locationUri\":\"file:///tmp\",\"parameters\":{}}" + lsep,outStr.toString());
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.DB_TABLE,null,".*",conf,out,err);
Assert.assertEquals("{\"name\":\"db0\",\"description\":\"no description\"," + "\"locationUri\":\"file:///tmp\",\"parameters\":{}}" + lsep + "{\"name\":\"db1\",\"description\":\"no description\","+ "\"locationUri\":\"file:///tmp\",\"parameters\":{}}"+ lsep+ "{\"name\":\"db2\",\"description\":\"no description\","+ "\"locationUri\":\"file:///tmp\",\"parameters\":{}}"+ lsep,outStr.toString());
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.DB_TABLE,null,"db[12]",conf,out,err);
Assert.assertEquals("{\"name\":\"db1\",\"description\":\"no description\"," + "\"locationUri\":\"file:///tmp\",\"parameters\":{}}" + lsep + "{\"name\":\"db2\",\"description\":\"no description\","+ "\"locationUri\":\"file:///tmp\",\"parameters\":{}}"+ lsep,outStr.toString());
// Seed two roles and dump by regex and by key.  createTime is wall-clock,
// so it is normalized to the literal "now" before comparison.
String[] roleNames=new String[2];
for (int i=0; i < roleNames.length; i++) {
roleNames[i]="role" + i;
store.addRole(roleNames[i],"me");
}
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.ROLE_TABLE,null,"role.",conf,out,err);
Assert.assertEquals("{\"roleName\":\"role0\",\"createTime\":now,\"ownerName\":\"me\"}" + lsep + "{\"roleName\":\"role1\",\"createTime\":now,\"ownerName\":\"me\"}"+ lsep,outStr.toString().replaceAll("createTime\":[0-9]+","createTime\":now"));
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.ROLE_TABLE,"role1",null,conf,out,err);
Assert.assertEquals("{\"roleName\":\"role1\",\"createTime\":now,\"ownerName\":\"me\"}" + lsep,outStr.toString().replaceAll("createTime\":[0-9]+","createTime\":now"));
// Grant role1 to two users; the user-to-role dump lists users and, for a
// single user key, just that user's roles.
Role role1=store.getRole("role1");
store.grantRole(role1,"fred",PrincipalType.USER,"me",PrincipalType.USER,false);
store.grantRole(role1,"joanne",PrincipalType.USER,"me",PrincipalType.USER,false);
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.USER_TO_ROLE_TABLE,null,".*",conf,out,err);
Assert.assertEquals("fred: role1" + lsep + "joanne: role1"+ lsep,outStr.toString());
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.USER_TO_ROLE_TABLE,"joanne",null,conf,out,err);
Assert.assertEquals("role1" + lsep,outStr.toString());
// Seed three functions in db1; dump by db-qualified key, by match-all
// regex, and by a db-qualified subset regex.
String[] funcNames=new String[3];
for (int i=0; i < funcNames.length; i++) {
funcNames[i]="func" + i;
Function function=new Function(funcNames[i],"db1","Function","me",PrincipalType.USER,0,FunctionType.JAVA,null);
store.createFunction(function);
}
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.FUNC_TABLE,"db1.func0",null,conf,out,err);
Assert.assertEquals("{\"functionName\":\"func0\",\"dbName\":\"db1\"," + "\"className\":\"Function\",\"ownerName\":\"me\",\"ownerType\":1,\"createTime\":0," + "\"functionType\":1}"+ lsep,outStr.toString());
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.FUNC_TABLE,null,".*",conf,out,err);
Assert.assertEquals("{\"functionName\":\"func0\",\"dbName\":\"db1\"," + "\"className\":\"Function\",\"ownerName\":\"me\",\"ownerType\":1,\"createTime\":0," + "\"functionType\":1}" + lsep + "{\"functionName\":\"func1\",\"dbName\":\"db1\","+ "\"className\":\"Function\",\"ownerName\":\"me\",\"ownerType\":1,\"createTime\":0,"+ "\"functionType\":1}"+ lsep+ "{\"functionName\":\"func2\",\"dbName\":\"db1\","+ "\"className\":\"Function\",\"ownerName\":\"me\",\"ownerType\":1,\"createTime\":0,"+ "\"functionType\":1}"+ lsep,outStr.toString());
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.FUNC_TABLE,null,"db1.func[12]",conf,out,err);
Assert.assertEquals("{\"functionName\":\"func1\",\"dbName\":\"db1\"," + "\"className\":\"Function\",\"ownerName\":\"me\",\"ownerType\":1,\"createTime\":0," + "\"functionType\":1}" + lsep + "{\"functionName\":\"func2\",\"dbName\":\"db1\","+ "\"className\":\"Function\",\"ownerName\":\"me\",\"ownerType\":1,\"createTime\":0,"+ "\"functionType\":1}"+ lsep,outStr.toString());
// Global privileges: empty first, then grant two privileges to one user
// and dump the whole privilege bag.
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.GLOBAL_PRIVS_TABLE,null,null,conf,out,err);
Assert.assertEquals("No global privileges" + lsep,outStr.toString());
List privileges=new ArrayList<>();
HiveObjectRef hiveObjRef=new HiveObjectRef(HiveObjectType.GLOBAL,"db0","tab0",null,null);
PrivilegeGrantInfo grantInfo=new PrivilegeGrantInfo("read",0,"me",PrincipalType.USER,false);
HiveObjectPrivilege hop=new HiveObjectPrivilege(hiveObjRef,"user",PrincipalType.USER,grantInfo);
privileges.add(hop);
grantInfo=new PrivilegeGrantInfo("create",0,"me",PrincipalType.USER,true);
hop=new HiveObjectPrivilege(hiveObjRef,"user",PrincipalType.USER,grantInfo);
privileges.add(hop);
PrivilegeBag pBag=new PrivilegeBag(privileges);
store.grantPrivileges(pBag);
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.GLOBAL_PRIVS_TABLE,null,null,conf,out,err);
Assert.assertEquals("{\"userPrivileges\":{\"user\":[{\"privilege\":\"read\",\"createTime\":0," + "\"grantor\":\"me\",\"grantorType\":1,\"grantOption\":0},{\"privilege\":\"create\"," + "\"createTime\":0,\"grantor\":\"me\",\"grantorType\":1,\"grantOption\":1}]}}"+ lsep,outStr.toString());
// Seed three partitioned tables in db0 (all sharing the same SD, hence the
// same sdHash in the dumps) and attach column stats to tab0.
String[] tableNames=new String[3];
for (int i=0; i < tableNames.length; i++) {
tableNames[i]="tab" + i;
StorageDescriptor sd=new StorageDescriptor(Arrays.asList(new FieldSchema("col1","int",""),new FieldSchema("col2","varchar(32)","")),"/tmp",null,null,false,0,null,null,null,Collections.emptyMap());
Table tab=new Table(tableNames[i],dbNames[0],"me",0,0,0,sd,Arrays.asList(new FieldSchema("pcol1","string",""),new FieldSchema("pcol2","string","")),Collections.emptyMap(),null,null,null);
store.createTable(tab);
}
ColumnStatisticsDesc tableStatsDesc=new ColumnStatisticsDesc(false,"db0","tab0");
ColumnStatisticsData tcsd=new ColumnStatisticsData();
LongColumnStatsData tlcsd=new LongColumnStatsData(1,2);
tlcsd.setLowValue(-95);
tlcsd.setHighValue(95);
tcsd.setLongStats(tlcsd);
ColumnStatisticsData tcsd2=new ColumnStatisticsData();
tcsd2.setStringStats(new StringColumnStatsData(97,18.78,29,397));
List tcsos=Arrays.asList(new ColumnStatisticsObj("col1","int",tcsd),new ColumnStatisticsObj("col2","varchar(32)",tcsd2));
ColumnStatistics tStatObj=new ColumnStatistics(tableStatsDesc,tcsos);
store.updateTableColumnStatistics(tStatObj);
// Table dumps: tab1 (no stats) by key, then all db0 tables by regex --
// tab0's dump carries the column stats written above.
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.TABLE_TABLE,"db0.tab1",null,conf,out,err);
Assert.assertEquals("{\"tableName\":\"tab1\",\"dbName\":\"db0\",\"owner\":\"me\"," + "\"createTime\":0,\"lastAccessTime\":0,\"retention\":0," + "\"partitionKeys\":[{\"name\":\"pcol1\",\"type\":\"string\",\"comment\":\"\"},"+ "{\"name\":\"pcol2\",\"type\":\"string\",\"comment\":\"\"}],\"parameters\":{},"+ "\"tableType\":\"\"} sdHash: qQTgZAi5VzgpozzFGmIVTQ stats:"+ lsep,outStr.toString());
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.TABLE_TABLE,null,"db0.*",conf,out,err);
Assert.assertEquals("{\"tableName\":\"tab0\",\"dbName\":\"db0\",\"owner\":\"me\"," + "\"createTime\":0,\"lastAccessTime\":0,\"retention\":0," + "\"partitionKeys\":[{\"name\":\"pcol1\",\"type\":\"string\",\"comment\":\"\"},"+ "{\"name\":\"pcol2\",\"type\":\"string\",\"comment\":\"\"}],\"parameters\":{\"COLUMN_STATS_ACCURATE\":\"{\\\"COLUMN_STATS\\\":{\\\"col1\\\":\\\"true\\\",\\\"col2\\\":\\\"true\\\"}}\"},"+ "\"tableType\":\"\"} sdHash: qQTgZAi5VzgpozzFGmIVTQ stats: column "+ "col1: {\"colName\":\"col1\",\"colType\":\"int\","+ "\"statsData\":{\"longStats\":{\"lowValue\":-95,\"highValue\":95,\"numNulls\":1,"+ "\"numDVs\":2,\"bitVectors\":\"\"}}} column col2: {\"colName\":\"col2\",\"colType\":\"varchar(32)\","+ "\"statsData\":{\"stringStats\":{\"maxColLen\":97,\"avgColLen\":18.78,"+ "\"numNulls\":29,\"numDVs\":397,\"bitVectors\":\"\"}}}" + lsep + "{\"tableName\":\"tab1\",\"dbName\":\"db0\",\"owner\":\"me\",\"createTime\":0,"+ "\"lastAccessTime\":0,\"retention\":0,\"partitionKeys\":[{\"name\":\"pcol1\","+ "\"type\":\"string\",\"comment\":\"\"},{\"name\":\"pcol2\",\"type\":\"string\","+ "\"comment\":\"\"}],\"parameters\":{},\"tableType\":\"\"} sdHash: "+ "qQTgZAi5VzgpozzFGmIVTQ stats:"+ lsep+ "{\"tableName\":\"tab2\",\"dbName\":\"db0\",\"owner\":\"me\",\"createTime\":0,"+ "\"lastAccessTime\":0,\"retention\":0,\"partitionKeys\":[{\"name\":\"pcol1\","+ "\"type\":\"string\",\"comment\":\"\"},{\"name\":\"pcol2\",\"type\":\"string\","+ "\"comment\":\"\"}],\"parameters\":{},\"tableType\":\"\"} sdHash: "+ "qQTgZAi5VzgpozzFGmIVTQ stats:"+ lsep,outStr.toString());
// Add two partitions to db0.tab1 and dump one by its full key.
List> partVals=Arrays.asList(Arrays.asList("a","b"),Arrays.asList("c","d"));
for ( List pv : partVals) {
StorageDescriptor sd=new StorageDescriptor(Arrays.asList(new FieldSchema("col1","int",""),new FieldSchema("col2","varchar(32)","")),"/tmp",null,null,false,0,null,null,null,Collections.emptyMap());
Partition p=new Partition(pv,"db0","tab1",0,0,sd,Collections.emptyMap());
store.addPartition(p);
}
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.PART_TABLE,"db0.tab1.a.b",null,conf,out,err);
Assert.assertEquals("{\"values\":[\"a\",\"b\"],\"dbName\":\"db0\",\"tableName\":\"tab1\"," + "\"createTime\":0,\"lastAccessTime\":0,\"parameters\":{}} sdHash: " + "qQTgZAi5VzgpozzFGmIVTQ stats:"+ lsep,outStr.toString());
// Attach partition-level column stats to the (c,d) partition, then dump:
// by key with stats, by table-wide regex, and by a one-value prefix.
ColumnStatisticsDesc statsDesc=new ColumnStatisticsDesc(false,"db0","tab1");
statsDesc.setPartName("pcol1=c/pcol2=d");
ColumnStatisticsData csd1=new ColumnStatisticsData();
LongColumnStatsData lcsd=new LongColumnStatsData(1,2);
lcsd.setLowValue(-95);
lcsd.setHighValue(95);
csd1.setLongStats(lcsd);
ColumnStatisticsData csd2=new ColumnStatisticsData();
csd2.setStringStats(new StringColumnStatsData(97,18.78,29,397));
List csos=Arrays.asList(new ColumnStatisticsObj("col1","int",csd1),new ColumnStatisticsObj("col2","varchar(32)",csd2));
ColumnStatistics statsObj=new ColumnStatistics(statsDesc,csos);
store.updatePartitionColumnStatistics(statsObj,partVals.get(1));
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.PART_TABLE,"db0.tab1.c.d",null,conf,out,err);
Assert.assertEquals("{\"values\":[\"c\",\"d\"],\"dbName\":\"db0\",\"tableName\":\"tab1\"," + "\"createTime\":0,\"lastAccessTime\":0,\"parameters\":{\"COLUMN_STATS_ACCURATE\":\"{\\\"COLUMN_STATS\\\":{\\\"col1\\\":\\\"true\\\",\\\"col2\\\":\\\"true\\\"}}\"}} sdHash: qQTgZAi5VzgpozzFGmIVTQ " + "stats: column col1: {\"colName\":\"col1\",\"colType\":\"int\","+ "\"statsData\":{\"longStats\":{\"lowValue\":-95,\"highValue\":95,\"numNulls\":1,"+ "\"numDVs\":2,\"bitVectors\":\"\"}}} column col2: {\"colName\":\"col2\",\"colType\":\"varchar(32)\","+ "\"statsData\":{\"stringStats\":{\"maxColLen\":97,\"avgColLen\":18.78,\"numNulls\":29,"+ "\"numDVs\":397,\"bitVectors\":\"\"}}}"+ lsep,outStr.toString());
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.PART_TABLE,null,"db0.tab1",conf,out,err);
Assert.assertEquals("{\"values\":[\"a\",\"b\"],\"dbName\":\"db0\",\"tableName\":\"tab1\"," + "\"createTime\":0,\"lastAccessTime\":0,\"parameters\":{}} sdHash: qQTgZAi5VzgpozzFGmIVTQ " + "stats:" + lsep + "{\"values\":[\"c\",\"d\"],\"dbName\":\"db0\",\"tableName\":\"tab1\",\"createTime\":0,"+ "\"lastAccessTime\":0,\"parameters\":{\"COLUMN_STATS_ACCURATE\":\"{\\\"COLUMN_STATS\\\":{\\\"col1\\\":\\\"true\\\",\\\"col2\\\":\\\"true\\\"}}\"}} sdHash: qQTgZAi5VzgpozzFGmIVTQ stats: column "+ "col1: {\"colName\":\"col1\",\"colType\":\"int\","+ "\"statsData\":{\"longStats\":{\"lowValue\":-95,\"highValue\":95,\"numNulls\":1,"+ "\"numDVs\":2,\"bitVectors\":\"\"}}} column col2: {\"colName\":\"col2\",\"colType\":\"varchar(32)\","+ "\"statsData\":{\"stringStats\":{\"maxColLen\":97,\"avgColLen\":18.78,\"numNulls\":29,"+ "\"numDVs\":397,\"bitVectors\":\"\"}}}"+ lsep,outStr.toString());
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.PART_TABLE,null,"db0.tab1.a",conf,out,err);
Assert.assertEquals("{\"values\":[\"a\",\"b\"],\"dbName\":\"db0\",\"tableName\":\"tab1\"," + "\"createTime\":0,\"lastAccessTime\":0,\"parameters\":{}} sdHash: qQTgZAi5VzgpozzFGmIVTQ " + "stats:"+ lsep,outStr.toString());
// Storage descriptors: by the shared hash key, then as a listing (the
// regex argument is ignored for SDs -- "whatever" still lists them all).
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.SD_TABLE,"qQTgZAi5VzgpozzFGmIVTQ",null,conf,out,err);
Assert.assertEquals("{\"cols\":[{\"name\":\"col1\",\"type\":\"int\",\"comment\":\"\"}," + "{\"name\":\"col2\",\"type\":\"varchar(32)\",\"comment\":\"\"}],\"compressed\":0," + "\"numBuckets\":0,\"bucketCols\":[],\"sortCols\":[],\"storedAsSubDirectories\":0}"+ lsep,outStr.toString());
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.SD_TABLE,null,"whatever",conf,out,err);
Assert.assertEquals("qQTgZAi5VzgpozzFGmIVTQ: {\"cols\":[{\"name\":\"col1\",\"type\":\"int\"," + "\"comment\":\"\"}," + "{\"name\":\"col2\",\"type\":\"varchar(32)\",\"comment\":\"\"}],\"compressed\":0,"+ "\"numBuckets\":0,\"bucketCols\":[],\"sortCols\":[],\"storedAsSubDirectories\":0}"+ lsep,outStr.toString());
// Security table: empty first, then one master key and one delegation
// token appear in the dump.
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.SECURITY_TABLE,null,"whatever",conf,out,err);
Assert.assertEquals("No security related entries" + lsep,outStr.toString());
store.addMasterKey("this be a key");
store.addToken("tokenid","delegation token");
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.SECURITY_TABLE,null,"whatever",conf,out,err);
Assert.assertEquals("Master key 0: this be a key" + lsep + "Delegation token tokenid: delegation token"+ lsep,outStr.toString());
// Adding the master key consumed sequence "mk", so the sequences dump is
// no longer empty.
outStr=new ByteArrayOutputStream();
out=new PrintStream(outStr);
tool.go(false,HBaseReadWrite.SEQUENCES_TABLE,null,"whatever",conf,out,err);
Assert.assertEquals("mk: 1" + lsep,outStr.toString());
}
EqualityVerifier
/**
 * Dumping the user-to-role table with a regex that matches no user should
 * report "No matching user" on stdout.
 */
@Test public void noMatchingUser() throws Exception {
  ByteArrayOutputStream stdout = new ByteArrayOutputStream();
  ByteArrayOutputStream stderr = new ByteArrayOutputStream();
  PrintStream toOut = new PrintStream(stdout);
  PrintStream toErr = new PrintStream(stderr);
  new HBaseSchemaTool().go(false, HBaseReadWrite.USER_TO_ROLE_TABLE, null, "nomatch", conf, toOut, toErr);
  Assert.assertEquals("No matching user: nomatch" + lsep, stdout.toString());
}
EqualityVerifier
/**
 * With the list flag set the tool prints every known HBase metastore table
 * name, one per line.
 */
@Test public void listTables() throws Exception {
  ByteArrayOutputStream stdout = new ByteArrayOutputStream();
  ByteArrayOutputStream stderr = new ByteArrayOutputStream();
  PrintStream toOut = new PrintStream(stdout);
  PrintStream toErr = new PrintStream(stderr);
  new HBaseSchemaTool().go(true, null, null, null, conf, toOut, toErr);
  Assert.assertEquals(StringUtils.join(HBaseReadWrite.tableNames, lsep) + lsep, stdout.toString());
}
EqualityVerifier
/**
 * Dumping the table table with a regex that matches nothing should report
 * "No matching table" on stdout.
 */
@Test public void noMatchingTable() throws Exception {
  ByteArrayOutputStream stdout = new ByteArrayOutputStream();
  ByteArrayOutputStream stderr = new ByteArrayOutputStream();
  PrintStream toOut = new PrintStream(stdout);
  PrintStream toErr = new PrintStream(stderr);
  new HBaseSchemaTool().go(false, HBaseReadWrite.TABLE_TABLE, null, "nomatch", conf, toOut, toErr);
  Assert.assertEquals("No matching table: nomatch" + lsep, stdout.toString());
}
EqualityVerifier
/**
 * Looking up a database by a key that does not exist should print
 * "No such database: &lt;key&gt;" on stdout.
 */
@Test public void noSuchDb() throws Exception {
  ByteArrayOutputStream stdout = new ByteArrayOutputStream();
  ByteArrayOutputStream stderr = new ByteArrayOutputStream();
  PrintStream toOut = new PrintStream(stdout);
  PrintStream toErr = new PrintStream(stderr);
  new HBaseSchemaTool().go(false, HBaseReadWrite.DB_TABLE, "nosuch", null, conf, toOut, toErr);
  Assert.assertEquals("No such database: nosuch" + lsep, stdout.toString());
}
EqualityVerifier
/**
 * A well-formed partition regex (db.table.values) that matches nothing
 * should still produce the "No matching partition" message.
 */
@Test public void noMatchingPartValidFormat() throws Exception {
  ByteArrayOutputStream stdout = new ByteArrayOutputStream();
  ByteArrayOutputStream stderr = new ByteArrayOutputStream();
  PrintStream toOut = new PrintStream(stdout);
  PrintStream toErr = new PrintStream(stderr);
  new HBaseSchemaTool().go(false, HBaseReadWrite.PART_TABLE, null, "nomatch.a.b", conf, toOut, toErr);
  Assert.assertEquals("No matching partition: nomatch.a.b" + lsep, stdout.toString());
}
EqualityVerifier
/**
 * Looking up a table by a key that does not exist should print
 * "No such table: &lt;key&gt;" on stdout.
 */
@Test public void noSuchTable() throws Exception {
  ByteArrayOutputStream stdout = new ByteArrayOutputStream();
  ByteArrayOutputStream stderr = new ByteArrayOutputStream();
  PrintStream toOut = new PrintStream(stdout);
  PrintStream toErr = new PrintStream(stderr);
  new HBaseSchemaTool().go(false, HBaseReadWrite.TABLE_TABLE, "nosuch", null, conf, toOut, toErr);
  Assert.assertEquals("No such table: nosuch" + lsep, stdout.toString());
}
EqualityVerifier
/**
 * A well-formed partition key (db.table.values) that does not exist should
 * produce the "No such partition" message.
 */
@Test public void noSuchPartValidFormat() throws Exception {
  ByteArrayOutputStream stdout = new ByteArrayOutputStream();
  ByteArrayOutputStream stderr = new ByteArrayOutputStream();
  PrintStream toOut = new PrintStream(stdout);
  PrintStream toErr = new PrintStream(stderr);
  new HBaseSchemaTool().go(false, HBaseReadWrite.PART_TABLE, "default.nosuch.nosuch", null, conf, toOut, toErr);
  Assert.assertEquals("No such partition: default.nosuch.nosuch" + lsep, stdout.toString());
}
EqualityVerifier
/**
 * Dumping the role table with a regex that matches no role should report
 * "No matching role" on stdout.
 */
@Test public void noMatchingRole() throws Exception {
  ByteArrayOutputStream stdout = new ByteArrayOutputStream();
  ByteArrayOutputStream stderr = new ByteArrayOutputStream();
  PrintStream toOut = new PrintStream(stdout);
  PrintStream toErr = new PrintStream(stderr);
  new HBaseSchemaTool().go(false, HBaseReadWrite.ROLE_TABLE, null, "nomatch", conf, toOut, toErr);
  Assert.assertEquals("No matching role: nomatch" + lsep, stdout.toString());
}
Class: org.apache.hadoop.hive.metastore.hbase.TestHBaseSchemaTool2 EqualityVerifier
/**
 * After install() creates the metastore tables, a list request should print
 * every known table name, joined by line separators.
 */
@Test public void install(){
  ByteArrayOutputStream stdout = new ByteArrayOutputStream();
  ByteArrayOutputStream stderr = new ByteArrayOutputStream();
  PrintStream toOut = new PrintStream(stdout);
  PrintStream toErr = new PrintStream(stderr);
  HBaseSchemaTool schemaTool = new HBaseSchemaTool();
  schemaTool.install(conf, toErr);
  schemaTool.go(true, null, null, null, conf, toOut, toErr);
  Assert.assertEquals(StringUtils.join(HBaseReadWrite.tableNames, lsep) + lsep, stdout.toString());
}
Class: org.apache.hadoop.hive.metastore.hbase.TestHBaseStore APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Round-trips partition-level binary column statistics through the store:
 * writes one ColumnStatisticsObj for a binary column and verifies the
 * descriptor fields and every binary stat (maxColLen, avgColLen, numNulls)
 * come back intact.
 */
@Test public void binaryPartitionStatistics() throws Exception {
  createMockTableAndPartition(BINARY_TYPE, BINARY_VAL);
  // Build the stats bundle: one descriptor plus one binary-column object.
  ColumnStatistics stats = new ColumnStatistics();
  ColumnStatisticsDesc desc = getMockPartColStatsDesc(PART_KEY, BINARY_VAL);
  stats.setStatsDesc(desc);
  ColumnStatisticsObj obj = binaryColStatsObjs.get(0);
  BinaryColumnStatsData binaryData = obj.getStatsData().getBinaryStats();
  stats.addToStatsObj(obj);
  // Generics restored here: the declarations had been stripped to raw types,
  // which cannot compile against the typed accessors used below.
  List<String> parVals = new ArrayList<String>();
  parVals.add(BINARY_VAL);
  store.updatePartitionColumnStatistics(stats, parVals);
  // Read the stats back for exactly this partition and column.
  List<String> partNames = new ArrayList<String>();
  partNames.add(desc.getPartName());
  List<String> colNames = new ArrayList<String>();
  colNames.add(obj.getColName());
  List<ColumnStatistics> statsFromDB = store.getPartitionColumnStatistics(DB, TBL, partNames, colNames);
  // Descriptor round-trip: one stats object, partition-level, same db/table.
  Assert.assertEquals(1, statsFromDB.size());
  Assert.assertEquals(desc.getLastAnalyzed(), statsFromDB.get(0).getStatsDesc().getLastAnalyzed());
  Assert.assertEquals(DB, statsFromDB.get(0).getStatsDesc().getDbName());
  Assert.assertEquals(TBL, statsFromDB.get(0).getStatsDesc().getTableName());
  Assert.assertFalse(statsFromDB.get(0).getStatsDesc().isIsTblLevel());
  Assert.assertEquals(1, statsFromDB.get(0).getStatsObjSize());
  // Data round-trip: still binary stats with the same values.
  ColumnStatisticsObj objFromDB = statsFromDB.get(0).getStatsObj().get(0);
  ColumnStatisticsData dataFromDB = objFromDB.getStatsData();
  Assert.assertEquals(ColumnStatisticsData._Fields.BINARY_STATS, dataFromDB.getSetField());
  BinaryColumnStatsData binaryDataFromDB = dataFromDB.getBinaryStats();
  Assert.assertEquals(binaryData.getMaxColLen(), binaryDataFromDB.getMaxColLen());
  Assert.assertEquals(binaryData.getAvgColLen(), binaryDataFromDB.getAvgColLen(), 0.01);
  Assert.assertEquals(binaryData.getNumNulls(), binaryDataFromDB.getNumNulls());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Round-trips table-level double column statistics through the store and
 * checks the descriptor plus high/low/numNulls/numDVs survive unchanged.
 */
@Test public void doubleTableStatistics() throws Exception {
  createMockTable(DOUBLE_TYPE);
  // Write one double-column stats object for the mock table.
  ColumnStatisticsDesc expectedDesc = getMockTblColStatsDesc();
  ColumnStatisticsObj written = doubleColStatsObjs.get(0);
  DoubleColumnStatsData expected = written.getStatsData().getDoubleStats();
  ColumnStatistics toStore = new ColumnStatistics();
  toStore.setStatsDesc(expectedDesc);
  toStore.addToStatsObj(written);
  store.updateTableColumnStatistics(toStore);
  // Read it back and compare field by field.
  ColumnStatistics fetched = store.getTableColumnStatistics(DB, TBL, Arrays.asList(DOUBLE_COL));
  Assert.assertEquals(expectedDesc.getLastAnalyzed(), fetched.getStatsDesc().getLastAnalyzed());
  Assert.assertEquals(DB, fetched.getStatsDesc().getDbName());
  Assert.assertEquals(TBL, fetched.getStatsDesc().getTableName());
  Assert.assertTrue(fetched.getStatsDesc().isIsTblLevel());
  Assert.assertEquals(1, fetched.getStatsObjSize());
  ColumnStatisticsData dataFromDB = fetched.getStatsObj().get(0).getStatsData();
  Assert.assertEquals(ColumnStatisticsData._Fields.DOUBLE_STATS, dataFromDB.getSetField());
  DoubleColumnStatsData actual = dataFromDB.getDoubleStats();
  Assert.assertEquals(expected.getHighValue(), actual.getHighValue(), 0.01);
  Assert.assertEquals(expected.getLowValue(), actual.getLowValue(), 0.01);
  Assert.assertEquals(expected.getNumNulls(), actual.getNumNulls());
  Assert.assertEquals(expected.getNumDVs(), actual.getNumDVs());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Round-trips table-level decimal column statistics through the store and
 * checks the descriptor plus high/low/numNulls/numDVs survive unchanged.
 */
@Test public void decimalTableStatistics() throws Exception {
  createMockTable(DECIMAL_TYPE);
  // Write one decimal-column stats object for the mock table.
  ColumnStatisticsDesc expectedDesc = getMockTblColStatsDesc();
  ColumnStatisticsObj written = decimalColStatsObjs.get(0);
  DecimalColumnStatsData expected = written.getStatsData().getDecimalStats();
  ColumnStatistics toStore = new ColumnStatistics();
  toStore.setStatsDesc(expectedDesc);
  toStore.addToStatsObj(written);
  store.updateTableColumnStatistics(toStore);
  // Read it back and compare field by field.
  ColumnStatistics fetched = store.getTableColumnStatistics(DB, TBL, Arrays.asList(DECIMAL_COL));
  Assert.assertEquals(expectedDesc.getLastAnalyzed(), fetched.getStatsDesc().getLastAnalyzed());
  Assert.assertEquals(DB, fetched.getStatsDesc().getDbName());
  Assert.assertEquals(TBL, fetched.getStatsDesc().getTableName());
  Assert.assertTrue(fetched.getStatsDesc().isIsTblLevel());
  Assert.assertEquals(1, fetched.getStatsObjSize());
  ColumnStatisticsData dataFromDB = fetched.getStatsObj().get(0).getStatsData();
  Assert.assertEquals(ColumnStatisticsData._Fields.DECIMAL_STATS, dataFromDB.getSetField());
  DecimalColumnStatsData actual = dataFromDB.getDecimalStats();
  Assert.assertEquals(expected.getHighValue(), actual.getHighValue());
  Assert.assertEquals(expected.getLowValue(), actual.getLowValue());
  Assert.assertEquals(expected.getNumNulls(), actual.getNumNulls());
  Assert.assertEquals(expected.getNumDVs(), actual.getNumDVs());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Round-trips partition-level string column statistics through the store:
 * writes one ColumnStatisticsObj for a string column and verifies the
 * descriptor fields and every string stat (maxColLen, avgColLen, numNulls,
 * numDVs) come back intact.
 */
@Test public void stringPartitionStatistics() throws Exception {
  createMockTableAndPartition(STRING_TYPE, STRING_VAL);
  // Build the stats bundle: one descriptor plus one string-column object.
  ColumnStatistics stats = new ColumnStatistics();
  ColumnStatisticsDesc desc = getMockPartColStatsDesc(PART_KEY, STRING_VAL);
  stats.setStatsDesc(desc);
  ColumnStatisticsObj obj = stringColStatsObjs.get(0);
  StringColumnStatsData stringData = obj.getStatsData().getStringStats();
  stats.addToStatsObj(obj);
  // Generics restored here: the declarations had been stripped to raw types,
  // which cannot compile against the typed accessors used below.
  List<String> parVals = new ArrayList<String>();
  parVals.add(STRING_VAL);
  store.updatePartitionColumnStatistics(stats, parVals);
  // Read the stats back for exactly this partition and column.
  List<String> partNames = new ArrayList<String>();
  partNames.add(desc.getPartName());
  List<String> colNames = new ArrayList<String>();
  colNames.add(obj.getColName());
  List<ColumnStatistics> statsFromDB = store.getPartitionColumnStatistics(DB, TBL, partNames, colNames);
  // Descriptor round-trip: one stats object, partition-level, same db/table.
  Assert.assertEquals(1, statsFromDB.size());
  Assert.assertEquals(desc.getLastAnalyzed(), statsFromDB.get(0).getStatsDesc().getLastAnalyzed());
  Assert.assertEquals(DB, statsFromDB.get(0).getStatsDesc().getDbName());
  Assert.assertEquals(TBL, statsFromDB.get(0).getStatsDesc().getTableName());
  Assert.assertFalse(statsFromDB.get(0).getStatsDesc().isIsTblLevel());
  Assert.assertEquals(1, statsFromDB.get(0).getStatsObjSize());
  // Data round-trip: still string stats with the same values.
  ColumnStatisticsObj objFromDB = statsFromDB.get(0).getStatsObj().get(0);
  ColumnStatisticsData dataFromDB = objFromDB.getStatsData();
  Assert.assertEquals(ColumnStatisticsData._Fields.STRING_STATS, dataFromDB.getSetField());
  StringColumnStatsData stringDataFromDB = dataFromDB.getStringStats();
  Assert.assertEquals(stringData.getMaxColLen(), stringDataFromDB.getMaxColLen());
  Assert.assertEquals(stringData.getAvgColLen(), stringDataFromDB.getAvgColLen(), 0.01);
  Assert.assertEquals(stringData.getNumNulls(), stringDataFromDB.getNumNulls());
  Assert.assertEquals(stringData.getNumDVs(), stringDataFromDB.getNumDVs());
}
InternalCallVerifier EqualityVerifier
/**
 * Creates a partitioned table with five partitions and verifies that
 * getPartitions with no limit (-1) returns all of them with the expected
 * partition values and per-partition storage locations.
 */
@Test public void getPartitions() throws Exception {
  String tableName = "manyParts";
  int startTime = (int) (System.currentTimeMillis() / 1000);
  // Table schema: one data column, one string partition column.
  // Generics restored: the original declarations had been stripped to raw
  // types, and `parts.get(i).getValues()` below cannot compile raw.
  List<FieldSchema> cols = new ArrayList<FieldSchema>();
  cols.add(new FieldSchema("col1", "int", "nocomment"));
  SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
  StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, emptyParameters);
  List<FieldSchema> partCols = new ArrayList<FieldSchema>();
  partCols.add(new FieldSchema("pc", "string", ""));
  Table table = new Table(tableName, DB, "me", startTime, startTime, 0, sd, partCols, emptyParameters, null, null, null);
  store.createTable(table);
  // Add one partition per value, each with its own location, and check each
  // one can be fetched individually right away.
  List<String> partVals = Arrays.asList("alan", "bob", "carl", "doug", "ethan");
  for (String val : partVals) {
    List<String> vals = new ArrayList<String>();
    vals.add(val);
    StorageDescriptor psd = new StorageDescriptor(sd);
    psd.setLocation("file:/tmp/pc=" + val);
    Partition part = new Partition(vals, DB, tableName, startTime, startTime, psd, emptyParameters);
    store.addPartition(part);
    Partition p = store.getPartition(DB, tableName, vals);
    Assert.assertEquals("file:/tmp/pc=" + val, p.getSd().getLocation());
  }
  // -1 means "no limit": all five partitions must come back.
  List<Partition> parts = store.getPartitions(DB, tableName, -1);
  Assert.assertEquals(5, parts.size());
  String[] pv = new String[5];
  for (int i = 0; i < 5; i++) pv[i] = parts.get(i).getValues().get(0);
  // getPartitions makes no ordering promise here; sort before comparing.
  Arrays.sort(pv);
  Assert.assertArrayEquals(pv, partVals.toArray(new String[5]));
}
InternalCallVerifier EqualityVerifier
/**
 * A database created through the store can be fetched back with the same
 * name, description, and location URI.
 */
@Test public void createDb() throws Exception {
  String name = "mydb";
  store.createDatabase(new Database(name, "no description", "file:///tmp", emptyParameters));
  Database fetched = store.getDatabase(name);
  Assert.assertEquals(name, fetched.getName());
  Assert.assertEquals("no description", fetched.getDescription());
  Assert.assertEquals("file:///tmp", fetched.getLocationUri());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Round-trips table-level boolean column statistics through the store and
 * verifies the descriptor plus numTrues/numFalses/numNulls survive.
 */
@Test public void booleanTableStatistics() throws Exception {
  // Every sibling *TableStatistics test creates its mock table before
  // writing stats; this one previously skipped that step.  (Assumes a
  // BOOLEAN_TYPE constant parallel to DOUBLE_TYPE etc. -- confirm against
  // the class's constants.)
  createMockTable(BOOLEAN_TYPE);
  ColumnStatistics stats = new ColumnStatistics();
  ColumnStatisticsDesc desc = getMockTblColStatsDesc();
  stats.setStatsDesc(desc);
  ColumnStatisticsObj obj = booleanColStatsObjs.get(0);
  BooleanColumnStatsData boolData = obj.getStatsData().getBooleanStats();
  stats.addToStatsObj(obj);
  store.updateTableColumnStatistics(stats);
  // Read the stats back for the boolean column and compare field by field.
  ColumnStatistics statsFromDB = store.getTableColumnStatistics(DB, TBL, Arrays.asList(BOOLEAN_COL));
  Assert.assertEquals(desc.getLastAnalyzed(), statsFromDB.getStatsDesc().getLastAnalyzed());
  Assert.assertEquals(DB, statsFromDB.getStatsDesc().getDbName());
  Assert.assertEquals(TBL, statsFromDB.getStatsDesc().getTableName());
  Assert.assertTrue(statsFromDB.getStatsDesc().isIsTblLevel());
  Assert.assertEquals(1, statsFromDB.getStatsObjSize());
  ColumnStatisticsObj objFromDB = statsFromDB.getStatsObj().get(0);
  ColumnStatisticsData dataFromDB = objFromDB.getStatsData();
  Assert.assertEquals(ColumnStatisticsData._Fields.BOOLEAN_STATS, dataFromDB.getSetField());
  BooleanColumnStatsData boolDataFromDB = dataFromDB.getBooleanStats();
  Assert.assertEquals(boolData.getNumTrues(), boolDataFromDB.getNumTrues());
  Assert.assertEquals(boolData.getNumFalses(), boolDataFromDB.getNumFalses());
  Assert.assertEquals(boolData.getNumNulls(), boolDataFromDB.getNumNulls());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test public void hashSd() throws Exception {
List cols=new ArrayList();
cols.add(new FieldSchema("col1","int",""));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",true,0,serde,null,null,emptyParameters);
Map,String> map=new HashMap,String>();
map.put(Arrays.asList("col3"),"col4");
SkewedInfo skew=new SkewedInfo(Arrays.asList("col1"),Arrays.asList(Arrays.asList("col2")),map);
sd.setSkewedInfo(skew);
MessageDigest md=MessageDigest.getInstance("MD5");
byte[] baseHash=HBaseUtils.hashStorageDescriptor(sd,md);
StorageDescriptor changeSchema=new StorageDescriptor(sd);
changeSchema.getCols().add(new FieldSchema("col2","varchar(32)","a comment"));
byte[] schemaHash=HBaseUtils.hashStorageDescriptor(changeSchema,md);
Assert.assertFalse(Arrays.equals(baseHash,schemaHash));
StorageDescriptor changeLocation=new StorageDescriptor(sd);
changeLocation.setLocation("file:/somewhere/else");
byte[] locationHash=HBaseUtils.hashStorageDescriptor(changeLocation,md);
Assert.assertArrayEquals(baseHash,locationHash);
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Alters a partition's lastAccessTime and verifies the changed field plus
 * all untouched fields round-trip through the store; also checks
 * doesPartitionExist for both an existing and a missing partition.
 */
@Test public void alterPartition() throws Exception {
  String tableName = "alterparttable";
  int startTime = (int) (System.currentTimeMillis() / 1000);
  // Generics restored: the declarations had been stripped to raw types.
  List<FieldSchema> cols = new ArrayList<FieldSchema>();
  cols.add(new FieldSchema("col1", "int", "nocomment"));
  SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
  StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, emptyParameters);
  List<FieldSchema> partCols = new ArrayList<FieldSchema>();
  partCols.add(new FieldSchema("pc", "string", ""));
  Table table = new Table(tableName, DB, "me", startTime, startTime, 0, sd, partCols, emptyParameters, null, null, null);
  store.createTable(table);
  List<String> vals = Arrays.asList("fred");
  StorageDescriptor psd = new StorageDescriptor(sd);
  psd.setLocation("file:/tmp/pc=fred");
  Partition part = new Partition(vals, DB, tableName, startTime, startTime, psd, emptyParameters);
  store.addPartition(part);
  // Mutate one field and push the change through alterPartition.
  part.setLastAccessTime(startTime + 10);
  store.alterPartition(DB, tableName, vals, part);
  Partition p = store.getPartition(DB, tableName, vals);
  // Unchanged fields must survive the alter...
  Assert.assertEquals(1, p.getSd().getColsSize());
  Assert.assertEquals("col1", p.getSd().getCols().get(0).getName());
  Assert.assertEquals("int", p.getSd().getCols().get(0).getType());
  Assert.assertEquals("nocomment", p.getSd().getCols().get(0).getComment());
  Assert.assertEquals("serde", p.getSd().getSerdeInfo().getName());
  Assert.assertEquals("seriallib", p.getSd().getSerdeInfo().getSerializationLib());
  Assert.assertEquals("file:/tmp/pc=fred", p.getSd().getLocation());
  Assert.assertEquals("input", p.getSd().getInputFormat());
  Assert.assertEquals("output", p.getSd().getOutputFormat());
  Assert.assertEquals(DB, p.getDbName());
  Assert.assertEquals(tableName, p.getTableName());
  Assert.assertEquals(1, p.getValuesSize());
  Assert.assertEquals("fred", p.getValues().get(0));
  // ...and the altered field must reflect the new value.
  Assert.assertEquals(startTime + 10, p.getLastAccessTime());
  Assert.assertTrue(store.doesPartitionExist(DB, tableName, vals));
  Assert.assertFalse(store.doesPartitionExist(DB, tableName, Arrays.asList("bob")));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Round-trips table-level long column statistics through
 * updateTableColumnStatistics / getTableColumnStatistics and verifies the
 * descriptor and every LongColumnStatsData field survive unchanged.
 * Uses fixtures (store, longColStatsObjs, getMockTblColStatsDesc) declared
 * elsewhere in the test class.
 */
@Test public void longTableStatistics() throws Exception {
createMockTable(LONG_TYPE);
ColumnStatistics stats=new ColumnStatistics();
ColumnStatisticsDesc desc=getMockTblColStatsDesc();
stats.setStatsDesc(desc);
ColumnStatisticsObj obj=longColStatsObjs.get(0);
LongColumnStatsData longData=obj.getStatsData().getLongStats();
stats.addToStatsObj(obj);
// Persist, then read back the stats for the single long column.
store.updateTableColumnStatistics(stats);
ColumnStatistics statsFromDB=store.getTableColumnStatistics(DB,TBL,Arrays.asList(LONG_COL));
Assert.assertEquals(desc.getLastAnalyzed(),statsFromDB.getStatsDesc().getLastAnalyzed());
Assert.assertEquals(DB,statsFromDB.getStatsDesc().getDbName());
Assert.assertEquals(TBL,statsFromDB.getStatsDesc().getTableName());
Assert.assertTrue(statsFromDB.getStatsDesc().isIsTblLevel());
Assert.assertEquals(1,statsFromDB.getStatsObjSize());
ColumnStatisticsObj objFromDB=statsFromDB.getStatsObj().get(0);
ColumnStatisticsData dataFromDB=objFromDB.getStatsData();
// The stats union must still carry the LONG_STATS variant.
Assert.assertEquals(ColumnStatisticsData._Fields.LONG_STATS,dataFromDB.getSetField());
LongColumnStatsData longDataFromDB=dataFromDB.getLongStats();
Assert.assertEquals(longData.getHighValue(),longDataFromDB.getHighValue());
Assert.assertEquals(longData.getLowValue(),longDataFromDB.getLowValue());
Assert.assertEquals(longData.getNumNulls(),longDataFromDB.getNumNulls());
Assert.assertEquals(longData.getNumDVs(),longDataFromDB.getNumDVs());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Round-trips table-level string column statistics through the store and
 * verifies the descriptor and every StringColumnStatsData field are
 * preserved (avgColLen compared with a 0.01 tolerance as it is a double).
 */
@Test public void stringTableStatistics() throws Exception {
createMockTable(STRING_TYPE);
ColumnStatistics stats=new ColumnStatistics();
ColumnStatisticsDesc desc=getMockTblColStatsDesc();
stats.setStatsDesc(desc);
ColumnStatisticsObj obj=stringColStatsObjs.get(0);
StringColumnStatsData stringData=obj.getStatsData().getStringStats();
stats.addToStatsObj(obj);
// Persist, then read back the stats for the single string column.
store.updateTableColumnStatistics(stats);
ColumnStatistics statsFromDB=store.getTableColumnStatistics(DB,TBL,Arrays.asList(STRING_COL));
Assert.assertEquals(desc.getLastAnalyzed(),statsFromDB.getStatsDesc().getLastAnalyzed());
Assert.assertEquals(DB,statsFromDB.getStatsDesc().getDbName());
Assert.assertEquals(TBL,statsFromDB.getStatsDesc().getTableName());
Assert.assertTrue(statsFromDB.getStatsDesc().isIsTblLevel());
Assert.assertEquals(1,statsFromDB.getStatsObjSize());
ColumnStatisticsObj objFromDB=statsFromDB.getStatsObj().get(0);
ColumnStatisticsData dataFromDB=objFromDB.getStatsData();
// The stats union must still carry the STRING_STATS variant.
Assert.assertEquals(ColumnStatisticsData._Fields.STRING_STATS,dataFromDB.getSetField());
StringColumnStatsData stringDataFromDB=dataFromDB.getStringStats();
Assert.assertEquals(stringData.getMaxColLen(),stringDataFromDB.getMaxColLen());
Assert.assertEquals(stringData.getAvgColLen(),stringDataFromDB.getAvgColLen(),0.01);
Assert.assertEquals(stringData.getNumNulls(),stringDataFromDB.getNumNulls());
Assert.assertEquals(stringData.getNumDVs(),stringDataFromDB.getNumDVs());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Round-trips partition-level long column statistics through
 * updatePartitionColumnStatistics / getPartitionColumnStatistics for a
 * single partition (PART_KEY=INT_VAL) and verifies descriptor and data
 * fields, including that the result is flagged as non-table-level.
 */
@Test public void longPartitionStatistics() throws Exception {
createMockTableAndPartition(INT_TYPE,INT_VAL);
ColumnStatistics stats=new ColumnStatistics();
ColumnStatisticsDesc desc=getMockPartColStatsDesc(PART_KEY,INT_VAL);
stats.setStatsDesc(desc);
ColumnStatisticsObj obj=longColStatsObjs.get(0);
LongColumnStatsData longData=obj.getStatsData().getLongStats();
stats.addToStatsObj(obj);
// Persist the stats against the single partition value.
List parVals=new ArrayList();
parVals.add(INT_VAL);
store.updatePartitionColumnStatistics(stats,parVals);
// Query back by partition name and column name.
List partNames=new ArrayList();
partNames.add(desc.getPartName());
List colNames=new ArrayList();
colNames.add(obj.getColName());
List statsFromDB=store.getPartitionColumnStatistics(DB,TBL,partNames,colNames);
Assert.assertEquals(1,statsFromDB.size());
Assert.assertEquals(desc.getLastAnalyzed(),statsFromDB.get(0).getStatsDesc().getLastAnalyzed());
Assert.assertEquals(DB,statsFromDB.get(0).getStatsDesc().getDbName());
Assert.assertEquals(TBL,statsFromDB.get(0).getStatsDesc().getTableName());
Assert.assertFalse(statsFromDB.get(0).getStatsDesc().isIsTblLevel());
Assert.assertEquals(1,statsFromDB.get(0).getStatsObjSize());
ColumnStatisticsObj objFromDB=statsFromDB.get(0).getStatsObj().get(0);
ColumnStatisticsData dataFromDB=objFromDB.getStatsData();
Assert.assertEquals(ColumnStatisticsData._Fields.LONG_STATS,dataFromDB.getSetField());
LongColumnStatsData longDataFromDB=dataFromDB.getLongStats();
Assert.assertEquals(longData.getHighValue(),longDataFromDB.getHighValue());
Assert.assertEquals(longData.getLowValue(),longDataFromDB.getLowValue());
Assert.assertEquals(longData.getNumNulls(),longDataFromDB.getNumNulls());
Assert.assertEquals(longData.getNumDVs(),longDataFromDB.getNumDVs());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies that addRole stores a role retrievable via getRole with the
 * expected name and owner, and a create time no earlier than the call time.
 */
@Test public void createRole() throws Exception {
// Divide before casting. The original cast the raw millisecond value to int
// first ((int)System.currentTimeMillis() / 1000), which overflows int and
// yields a meaningless lower bound for the getCreateTime() check. Every
// sibling test already uses (int)(millis / 1000).
int now=(int)(System.currentTimeMillis() / 1000);
String roleName="myrole";
store.addRole(roleName,"me");
Role r=store.getRole(roleName);
Assert.assertEquals(roleName,r.getRoleName());
Assert.assertEquals("me",r.getOwnerName());
Assert.assertTrue(now <= r.getCreateTime());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies that a fully-populated unpartitioned table (columns, serde,
 * location, bucketing, sort order, SD parameters) written via createTable is
 * returned intact by getTable.
 */
@Test public void createTable() throws Exception {
String tableName="mytable";
int startTime=(int)(System.currentTimeMillis() / 1000);
List cols=new ArrayList();
cols.add(new FieldSchema("col1","int",""));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
Map params=new HashMap();
params.put("key","value");
// Storage descriptor exercising bucket cols, sort cols and parameters.
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,17,serde,Arrays.asList("bucketcol"),Arrays.asList(new Order("sortcol",1)),params);
Table table=new Table(tableName,"default","me",startTime,startTime,0,sd,null,emptyParameters,null,null,null);
store.createTable(table);
// Read back and verify every field that was set above.
Table t=store.getTable("default",tableName);
Assert.assertEquals(1,t.getSd().getColsSize());
Assert.assertEquals("col1",t.getSd().getCols().get(0).getName());
Assert.assertEquals("int",t.getSd().getCols().get(0).getType());
Assert.assertEquals("",t.getSd().getCols().get(0).getComment());
Assert.assertEquals("serde",t.getSd().getSerdeInfo().getName());
Assert.assertEquals("seriallib",t.getSd().getSerdeInfo().getSerializationLib());
Assert.assertEquals("file:/tmp",t.getSd().getLocation());
Assert.assertEquals("input",t.getSd().getInputFormat());
Assert.assertEquals("output",t.getSd().getOutputFormat());
Assert.assertFalse(t.getSd().isCompressed());
Assert.assertEquals(17,t.getSd().getNumBuckets());
Assert.assertEquals(1,t.getSd().getBucketColsSize());
Assert.assertEquals("bucketcol",t.getSd().getBucketCols().get(0));
Assert.assertEquals(1,t.getSd().getSortColsSize());
Assert.assertEquals("sortcol",t.getSd().getSortCols().get(0).getCol());
Assert.assertEquals(1,t.getSd().getSortCols().get(0).getOrder());
Assert.assertEquals(1,t.getSd().getParametersSize());
Assert.assertEquals("value",t.getSd().getParameters().get("key"));
Assert.assertEquals("me",t.getOwner());
Assert.assertEquals("default",t.getDbName());
Assert.assertEquals(tableName,t.getTableName());
Assert.assertEquals(0,t.getParametersSize());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Exercises the partition-name lifecycle on a two-key partitioned table:
 * listPartitionNames returns the expected "pc=.../region=..." names,
 * getPartitionsByNames resolves them back to their values, and
 * dropPartitions removes them all.
 */
@Test public void listGetDropPartitionNames() throws Exception {
String tableName="listParts";
int startTime=(int)(System.currentTimeMillis() / 1000);
List cols=new ArrayList();
cols.add(new FieldSchema("col1","int","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,emptyParameters);
// Two partition keys so the generated names have two components.
List partCols=new ArrayList();
partCols.add(new FieldSchema("pc","string",""));
partCols.add(new FieldSchema("region","string",""));
Table table=new Table(tableName,DB,"me",startTime,startTime,0,sd,partCols,emptyParameters,null,null,null);
store.createTable(table);
String[][] partVals=new String[][]{{"today","north america"},{"tomorrow","europe"}};
for ( String[] pv : partVals) {
List vals=new ArrayList();
for ( String v : pv) vals.add(v);
StorageDescriptor psd=new StorageDescriptor(sd);
psd.setLocation("file:/tmp/pc=" + pv[0] + "/region="+ pv[1]);
Partition part=new Partition(vals,DB,tableName,startTime,startTime,psd,emptyParameters);
store.addPartition(part);
}
List names=store.listPartitionNames(DB,tableName,(short)-1);
Assert.assertEquals(2,names.size());
String[] resultNames=names.toArray(new String[names.size()]);
Arrays.sort(resultNames);
// JUnit's assertArrayEquals takes (expecteds, actuals); the original had the
// arguments reversed, which produced misleading failure messages.
Assert.assertArrayEquals(new String[]{"pc=today/region=north america","pc=tomorrow/region=europe"},resultNames);
// Resolve the names back to partitions and check their values.
List parts=store.getPartitionsByNames(DB,tableName,names);
Assert.assertArrayEquals(partVals[0],parts.get(0).getValues().toArray(new String[2]));
Assert.assertArrayEquals(partVals[1],parts.get(1).getValues().toArray(new String[2]));
// Dropping all named partitions must leave the table with none.
store.dropPartitions(DB,tableName,names);
List afterDropParts=store.getPartitions(DB,tableName,-1);
Assert.assertEquals(0,afterDropParts.size());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Round-trips partition-level decimal column statistics through the store
 * and verifies descriptor and DecimalColumnStatsData fields, including that
 * the result is flagged as non-table-level.
 */
@Test public void decimalPartitionStatistics() throws Exception {
createMockTableAndPartition(DECIMAL_TYPE,DECIMAL_VAL);
ColumnStatistics stats=new ColumnStatistics();
ColumnStatisticsDesc desc=getMockPartColStatsDesc(PART_KEY,DECIMAL_VAL);
stats.setStatsDesc(desc);
ColumnStatisticsObj obj=decimalColStatsObjs.get(0);
DecimalColumnStatsData decimalData=obj.getStatsData().getDecimalStats();
stats.addToStatsObj(obj);
// Persist the stats against the single partition value.
List parVals=new ArrayList();
parVals.add(DECIMAL_VAL);
store.updatePartitionColumnStatistics(stats,parVals);
// Query back by partition name and column name.
List partNames=new ArrayList();
partNames.add(desc.getPartName());
List colNames=new ArrayList();
colNames.add(obj.getColName());
List statsFromDB=store.getPartitionColumnStatistics(DB,TBL,partNames,colNames);
Assert.assertEquals(1,statsFromDB.size());
Assert.assertEquals(desc.getLastAnalyzed(),statsFromDB.get(0).getStatsDesc().getLastAnalyzed());
Assert.assertEquals(DB,statsFromDB.get(0).getStatsDesc().getDbName());
Assert.assertEquals(TBL,statsFromDB.get(0).getStatsDesc().getTableName());
Assert.assertFalse(statsFromDB.get(0).getStatsDesc().isIsTblLevel());
Assert.assertEquals(1,statsFromDB.get(0).getStatsObjSize());
ColumnStatisticsObj objFromDB=statsFromDB.get(0).getStatsObj().get(0);
ColumnStatisticsData dataFromDB=objFromDB.getStatsData();
Assert.assertEquals(ColumnStatisticsData._Fields.DECIMAL_STATS,dataFromDB.getSetField());
DecimalColumnStatsData decimalDataFromDB=dataFromDB.getDecimalStats();
Assert.assertEquals(decimalData.getHighValue(),decimalDataFromDB.getHighValue());
Assert.assertEquals(decimalData.getLowValue(),decimalDataFromDB.getLowValue());
Assert.assertEquals(decimalData.getNumNulls(),decimalDataFromDB.getNumNulls());
Assert.assertEquals(decimalData.getNumDVs(),decimalDataFromDB.getNumDVs());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies that addPartition stores a partition whose storage descriptor,
 * identity, and values are returned intact by getPartition, and that
 * doesPartitionExist reports true/false for present/absent values.
 */
@Test public void createPartition() throws Exception {
String tableName="myparttable";
int startTime=(int)(System.currentTimeMillis() / 1000);
// Build a minimal partitioned table (one data column, one partition column).
List cols=new ArrayList();
cols.add(new FieldSchema("col1","int","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,emptyParameters);
List partCols=new ArrayList();
partCols.add(new FieldSchema("pc","string",""));
Table table=new Table(tableName,DB,"me",startTime,startTime,0,sd,partCols,emptyParameters,null,null,null);
store.createTable(table);
// Add a single partition pc=fred with its own copy of the storage descriptor.
List vals=Arrays.asList("fred");
StorageDescriptor psd=new StorageDescriptor(sd);
psd.setLocation("file:/tmp/pc=fred");
Partition part=new Partition(vals,DB,tableName,startTime,startTime,psd,emptyParameters);
store.addPartition(part);
// Read back and verify all stored fields.
Partition p=store.getPartition(DB,tableName,vals);
Assert.assertEquals(1,p.getSd().getColsSize());
Assert.assertEquals("col1",p.getSd().getCols().get(0).getName());
Assert.assertEquals("int",p.getSd().getCols().get(0).getType());
Assert.assertEquals("nocomment",p.getSd().getCols().get(0).getComment());
Assert.assertEquals("serde",p.getSd().getSerdeInfo().getName());
Assert.assertEquals("seriallib",p.getSd().getSerdeInfo().getSerializationLib());
Assert.assertEquals("file:/tmp/pc=fred",p.getSd().getLocation());
Assert.assertEquals("input",p.getSd().getInputFormat());
Assert.assertEquals("output",p.getSd().getOutputFormat());
Assert.assertEquals(DB,p.getDbName());
Assert.assertEquals(tableName,p.getTableName());
Assert.assertEquals(1,p.getValuesSize());
Assert.assertEquals("fred",p.getValues().get(0));
Assert.assertTrue(store.doesPartitionExist(DB,tableName,vals));
Assert.assertFalse(store.doesPartitionExist(DB,tableName,Arrays.asList("bob")));
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies that createFunction persists a function that getFunction returns
 * with the same name, class, owner, type, and resource URIs, and a create
 * time no earlier than when the test started.
 */
@Test public void createFunction() throws Exception {
String funcName="createfunc";
int now=(int)(System.currentTimeMillis() / 1000);
ResourceUri jarUri=new ResourceUri(ResourceType.JAR,"file:/tmp/somewhere");
Function created=new Function(funcName,DB,"o.a.h.h.myfunc","me",PrincipalType.USER,now,FunctionType.JAVA,Arrays.asList(jarUri));
store.createFunction(created);
// Fetch it back and check every field round-tripped.
Function fetched=store.getFunction(DB,funcName);
Assert.assertEquals(DB,fetched.getDbName());
Assert.assertEquals(funcName,fetched.getFunctionName());
Assert.assertEquals("o.a.h.h.myfunc",fetched.getClassName());
Assert.assertEquals("me",fetched.getOwnerName());
Assert.assertEquals(PrincipalType.USER,fetched.getOwnerType());
Assert.assertTrue(now <= fetched.getCreateTime());
Assert.assertEquals(FunctionType.JAVA,fetched.getFunctionType());
Assert.assertEquals(1,fetched.getResourceUrisSize());
ResourceUri storedUri=fetched.getResourceUris().get(0);
Assert.assertEquals(ResourceType.JAR,storedUri.getResourceType());
Assert.assertEquals("file:/tmp/somewhere",storedUri.getUri());
}
InternalCallVerifier EqualityVerifier
/**
 * Verifies that alterTable persists a changed lastAccessTime while leaving
 * every other table and storage-descriptor field unchanged on read-back.
 */
@Test public void alterTable() throws Exception {
String tableName="alttable";
int startTime=(int)(System.currentTimeMillis() / 1000);
List cols=new ArrayList();
cols.add(new FieldSchema("col1","int","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,emptyParameters);
Table table=new Table(tableName,"default","me",startTime,startTime,0,sd,null,emptyParameters,null,null,null);
store.createTable(table);
// Bump the access time and push the change through alterTable.
startTime+=10;
table.setLastAccessTime(startTime);
store.alterTable("default",tableName,table);
// Read back and verify both the altered field and the untouched metadata.
Table t=store.getTable("default",tableName);
Assert.assertEquals(1,t.getSd().getColsSize());
Assert.assertEquals("col1",t.getSd().getCols().get(0).getName());
Assert.assertEquals("int",t.getSd().getCols().get(0).getType());
Assert.assertEquals("nocomment",t.getSd().getCols().get(0).getComment());
Assert.assertEquals("serde",t.getSd().getSerdeInfo().getName());
Assert.assertEquals("seriallib",t.getSd().getSerdeInfo().getSerializationLib());
Assert.assertEquals("file:/tmp",t.getSd().getLocation());
Assert.assertEquals("input",t.getSd().getInputFormat());
Assert.assertEquals("output",t.getSd().getOutputFormat());
Assert.assertEquals("me",t.getOwner());
Assert.assertEquals("default",t.getDbName());
Assert.assertEquals(tableName,t.getTableName());
Assert.assertEquals(startTime,t.getLastAccessTime());
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
@Test public void skewInfo() throws Exception {
String tableName="mytable";
int startTime=(int)(System.currentTimeMillis() / 1000);
List cols=new ArrayList();
cols.add(new FieldSchema("col1","int",""));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",true,0,serde,null,null,emptyParameters);
Map,String> map=new HashMap,String>();
map.put(Arrays.asList("col3"),"col4");
SkewedInfo skew=new SkewedInfo(Arrays.asList("col1"),Arrays.asList(Arrays.asList("col2")),map);
sd.setSkewedInfo(skew);
Table table=new Table(tableName,"default","me",startTime,startTime,0,sd,null,emptyParameters,null,null,null);
store.createTable(table);
Table t=store.getTable("default",tableName);
Assert.assertEquals(1,t.getSd().getColsSize());
Assert.assertEquals("col1",t.getSd().getCols().get(0).getName());
Assert.assertEquals("int",t.getSd().getCols().get(0).getType());
Assert.assertEquals("",t.getSd().getCols().get(0).getComment());
Assert.assertEquals("serde",t.getSd().getSerdeInfo().getName());
Assert.assertEquals("seriallib",t.getSd().getSerdeInfo().getSerializationLib());
Assert.assertEquals("file:/tmp",t.getSd().getLocation());
Assert.assertEquals("input",t.getSd().getInputFormat());
Assert.assertEquals("output",t.getSd().getOutputFormat());
Assert.assertTrue(t.getSd().isCompressed());
Assert.assertEquals(0,t.getSd().getNumBuckets());
Assert.assertEquals(0,t.getSd().getSortColsSize());
Assert.assertEquals("me",t.getOwner());
Assert.assertEquals("default",t.getDbName());
Assert.assertEquals(tableName,t.getTableName());
Assert.assertEquals(0,t.getParametersSize());
skew=t.getSd().getSkewedInfo();
Assert.assertNotNull(skew);
Assert.assertEquals(1,skew.getSkewedColNamesSize());
Assert.assertEquals("col1",skew.getSkewedColNames().get(0));
Assert.assertEquals(1,skew.getSkewedColValuesSize());
Assert.assertEquals("col2",skew.getSkewedColValues().get(0).get(0));
Assert.assertEquals(1,skew.getSkewedColValueLocationMapsSize());
Assert.assertEquals("col4",skew.getSkewedColValueLocationMaps().get(Arrays.asList("col3")));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Round-trips partition-level double column statistics through the store and
 * verifies descriptor and DoubleColumnStatsData fields (high/low values
 * compared with a 0.01 tolerance).
 */
@Test public void doublePartitionStatistics() throws Exception {
createMockTableAndPartition(DOUBLE_TYPE,DOUBLE_VAL);
ColumnStatistics stats=new ColumnStatistics();
ColumnStatisticsDesc desc=getMockPartColStatsDesc(PART_KEY,DOUBLE_VAL);
stats.setStatsDesc(desc);
ColumnStatisticsObj obj=doubleColStatsObjs.get(0);
DoubleColumnStatsData doubleData=obj.getStatsData().getDoubleStats();
stats.addToStatsObj(obj);
// Persist the stats against the single partition value.
List parVals=new ArrayList();
parVals.add(DOUBLE_VAL);
store.updatePartitionColumnStatistics(stats,parVals);
// Query back by partition name and column name.
List partNames=new ArrayList();
partNames.add(desc.getPartName());
List colNames=new ArrayList();
colNames.add(obj.getColName());
List statsFromDB=store.getPartitionColumnStatistics(DB,TBL,partNames,colNames);
Assert.assertEquals(1,statsFromDB.size());
Assert.assertEquals(desc.getLastAnalyzed(),statsFromDB.get(0).getStatsDesc().getLastAnalyzed());
Assert.assertEquals(DB,statsFromDB.get(0).getStatsDesc().getDbName());
Assert.assertEquals(TBL,statsFromDB.get(0).getStatsDesc().getTableName());
Assert.assertFalse(statsFromDB.get(0).getStatsDesc().isIsTblLevel());
Assert.assertEquals(1,statsFromDB.get(0).getStatsObjSize());
ColumnStatisticsObj objFromDB=statsFromDB.get(0).getStatsObj().get(0);
ColumnStatisticsData dataFromDB=objFromDB.getStatsData();
Assert.assertEquals(ColumnStatisticsData._Fields.DOUBLE_STATS,dataFromDB.getSetField());
DoubleColumnStatsData doubleDataFromDB=dataFromDB.getDoubleStats();
Assert.assertEquals(doubleData.getHighValue(),doubleDataFromDB.getHighValue(),0.01);
Assert.assertEquals(doubleData.getLowValue(),doubleDataFromDB.getLowValue(),0.01);
Assert.assertEquals(doubleData.getNumNulls(),doubleDataFromDB.getNumNulls());
Assert.assertEquals(doubleData.getNumDVs(),doubleDataFromDB.getNumDVs());
}
InternalCallVerifier EqualityVerifier
/**
 * Verifies that alterFunction persists an added resource URI: after the
 * alter, getFunction returns both the original FILE URI and the new
 * ARCHIVE URI in order.
 */
@Test public void alterFunction() throws Exception {
String funcName="alterfunc";
int now=(int)(System.currentTimeMillis() / 1000);
List uris=new ArrayList();
uris.add(new ResourceUri(ResourceType.FILE,"whatever"));
Function original=new Function(funcName,DB,"o.a.h.h.myfunc","me",PrincipalType.USER,now,FunctionType.JAVA,uris);
store.createFunction(original);
// Sanity check: the freshly created function carries only the FILE URI.
Function fetched=store.getFunction(DB,funcName);
Assert.assertEquals(ResourceType.FILE,fetched.getResourceUris().get(0).getResourceType());
// Append an ARCHIVE URI and push the change through alterFunction.
original.addToResourceUris(new ResourceUri(ResourceType.ARCHIVE,"file"));
store.alterFunction(DB,funcName,original);
fetched=store.getFunction(DB,funcName);
Assert.assertEquals(2,fetched.getResourceUrisSize());
Assert.assertEquals(ResourceType.FILE,fetched.getResourceUris().get(0).getResourceType());
Assert.assertEquals(ResourceType.ARCHIVE,fetched.getResourceUris().get(1).getResourceType());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Round-trips table-level binary column statistics through the store and
 * verifies descriptor and BinaryColumnStatsData fields (maxColLen,
 * avgColLen within 0.01, numNulls; binary stats have no numDVs check).
 */
@Test public void binaryTableStatistics() throws Exception {
createMockTable(BINARY_TYPE);
ColumnStatistics stats=new ColumnStatistics();
ColumnStatisticsDesc desc=getMockTblColStatsDesc();
stats.setStatsDesc(desc);
ColumnStatisticsObj obj=binaryColStatsObjs.get(0);
BinaryColumnStatsData binaryData=obj.getStatsData().getBinaryStats();
stats.addToStatsObj(obj);
// Persist, then read back the stats for the single binary column.
store.updateTableColumnStatistics(stats);
ColumnStatistics statsFromDB=store.getTableColumnStatistics(DB,TBL,Arrays.asList(BINARY_COL));
Assert.assertEquals(desc.getLastAnalyzed(),statsFromDB.getStatsDesc().getLastAnalyzed());
Assert.assertEquals(DB,statsFromDB.getStatsDesc().getDbName());
Assert.assertEquals(TBL,statsFromDB.getStatsDesc().getTableName());
Assert.assertTrue(statsFromDB.getStatsDesc().isIsTblLevel());
Assert.assertEquals(1,statsFromDB.getStatsObjSize());
ColumnStatisticsObj objFromDB=statsFromDB.getStatsObj().get(0);
ColumnStatisticsData dataFromDB=objFromDB.getStatsData();
Assert.assertEquals(ColumnStatisticsData._Fields.BINARY_STATS,dataFromDB.getSetField());
BinaryColumnStatsData binaryDataFromDB=dataFromDB.getBinaryStats();
Assert.assertEquals(binaryData.getMaxColLen(),binaryDataFromDB.getMaxColLen());
Assert.assertEquals(binaryData.getAvgColLen(),binaryDataFromDB.getAvgColLen(),0.01);
Assert.assertEquals(binaryData.getNumNulls(),binaryDataFromDB.getNumNulls());
}
InternalCallVerifier EqualityVerifier
/**
 * Verifies that alterDatabase persists a changed description while the
 * database name and location URI stay intact.
 */
@Test public void alterDb() throws Exception {
String dbname="mydb";
Database original=new Database(dbname,"no description","file:///tmp",emptyParameters);
store.createDatabase(original);
// Change only the description and push the update through alterDatabase.
original.setDescription("a description");
store.alterDatabase(dbname,original);
Database fetched=store.getDatabase(dbname);
Assert.assertEquals(dbname,fetched.getName());
Assert.assertEquals("a description",fetched.getDescription());
Assert.assertEquals("file:///tmp",fetched.getLocationUri());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Round-trips partition-level boolean column statistics through the store
 * and verifies descriptor and BooleanColumnStatsData fields
 * (numTrues/numFalses/numNulls).
 */
@Test public void booleanPartitionStatistics() throws Exception {
createMockTableAndPartition(BOOLEAN_TYPE,BOOLEAN_VAL);
ColumnStatistics stats=new ColumnStatistics();
ColumnStatisticsDesc desc=getMockPartColStatsDesc(PART_KEY,BOOLEAN_VAL);
stats.setStatsDesc(desc);
ColumnStatisticsObj obj=booleanColStatsObjs.get(0);
BooleanColumnStatsData boolData=obj.getStatsData().getBooleanStats();
stats.addToStatsObj(obj);
// Persist the stats against the single partition value.
List parVals=new ArrayList();
parVals.add(BOOLEAN_VAL);
store.updatePartitionColumnStatistics(stats,parVals);
// Query back by partition name and column name.
List partNames=new ArrayList();
partNames.add(desc.getPartName());
List colNames=new ArrayList();
colNames.add(obj.getColName());
List statsFromDB=store.getPartitionColumnStatistics(DB,TBL,partNames,colNames);
Assert.assertEquals(1,statsFromDB.size());
Assert.assertEquals(desc.getLastAnalyzed(),statsFromDB.get(0).getStatsDesc().getLastAnalyzed());
Assert.assertEquals(DB,statsFromDB.get(0).getStatsDesc().getDbName());
Assert.assertEquals(TBL,statsFromDB.get(0).getStatsDesc().getTableName());
Assert.assertFalse(statsFromDB.get(0).getStatsDesc().isIsTblLevel());
Assert.assertEquals(1,statsFromDB.get(0).getStatsObjSize());
ColumnStatisticsObj objFromDB=statsFromDB.get(0).getStatsObj().get(0);
ColumnStatisticsData dataFromDB=objFromDB.getStatsData();
Assert.assertEquals(ColumnStatisticsData._Fields.BOOLEAN_STATS,dataFromDB.getSetField());
BooleanColumnStatsData boolDataFromDB=dataFromDB.getBooleanStats();
Assert.assertEquals(boolData.getNumTrues(),boolDataFromDB.getNumTrues());
Assert.assertEquals(boolData.getNumFalses(),boolDataFromDB.getNumFalses());
Assert.assertEquals(boolData.getNumNulls(),boolDataFromDB.getNumNulls());
}
Class: org.apache.hadoop.hive.metastore.hbase.TestHBaseStoreBitVector APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * (TestHBaseStoreBitVector) Round-trips partition-level decimal column
 * statistics and additionally verifies the bit-vector (NDV sketch) payload
 * survives the store round trip.
 */
@Test public void decimalPartitionStatistics() throws Exception {
createMockTableAndPartition(DECIMAL_TYPE,DECIMAL_VAL);
ColumnStatistics stats=new ColumnStatistics();
ColumnStatisticsDesc desc=getMockPartColStatsDesc(PART_KEY,DECIMAL_VAL);
stats.setStatsDesc(desc);
ColumnStatisticsObj obj=decimalColStatsObjs.get(0);
DecimalColumnStatsData decimalData=obj.getStatsData().getDecimalStats();
stats.addToStatsObj(obj);
// Persist the stats against the single partition value.
List parVals=new ArrayList();
parVals.add(DECIMAL_VAL);
store.updatePartitionColumnStatistics(stats,parVals);
// Query back by partition name and column name.
List partNames=new ArrayList();
partNames.add(desc.getPartName());
List colNames=new ArrayList();
colNames.add(obj.getColName());
List statsFromDB=store.getPartitionColumnStatistics(DB,TBL,partNames,colNames);
Assert.assertEquals(1,statsFromDB.size());
Assert.assertEquals(desc.getLastAnalyzed(),statsFromDB.get(0).getStatsDesc().getLastAnalyzed());
Assert.assertEquals(DB,statsFromDB.get(0).getStatsDesc().getDbName());
Assert.assertEquals(TBL,statsFromDB.get(0).getStatsDesc().getTableName());
Assert.assertFalse(statsFromDB.get(0).getStatsDesc().isIsTblLevel());
Assert.assertEquals(1,statsFromDB.get(0).getStatsObjSize());
ColumnStatisticsObj objFromDB=statsFromDB.get(0).getStatsObj().get(0);
ColumnStatisticsData dataFromDB=objFromDB.getStatsData();
Assert.assertEquals(ColumnStatisticsData._Fields.DECIMAL_STATS,dataFromDB.getSetField());
DecimalColumnStatsData decimalDataFromDB=dataFromDB.getDecimalStats();
Assert.assertEquals(decimalData.getHighValue(),decimalDataFromDB.getHighValue());
Assert.assertEquals(decimalData.getLowValue(),decimalDataFromDB.getLowValue());
Assert.assertEquals(decimalData.getNumNulls(),decimalDataFromDB.getNumNulls());
Assert.assertEquals(decimalData.getNumDVs(),decimalDataFromDB.getNumDVs());
Assert.assertEquals(decimalData.getBitVectors(),decimalDataFromDB.getBitVectors());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * (TestHBaseStoreBitVector) Round-trips table-level long column statistics
 * and additionally verifies the bit-vector (NDV sketch) payload survives
 * the store round trip.
 */
@Test public void longTableStatistics() throws Exception {
createMockTable(LONG_COL,LONG_TYPE);
ColumnStatistics stats=new ColumnStatistics();
ColumnStatisticsDesc desc=getMockTblColStatsDesc();
stats.setStatsDesc(desc);
ColumnStatisticsObj obj=longColStatsObjs.get(0);
LongColumnStatsData longData=obj.getStatsData().getLongStats();
stats.addToStatsObj(obj);
// Persist, then read back the stats for the single long column.
store.updateTableColumnStatistics(stats);
ColumnStatistics statsFromDB=store.getTableColumnStatistics(DB,TBL,Arrays.asList(LONG_COL));
Assert.assertEquals(desc.getLastAnalyzed(),statsFromDB.getStatsDesc().getLastAnalyzed());
Assert.assertEquals(DB,statsFromDB.getStatsDesc().getDbName());
Assert.assertEquals(TBL,statsFromDB.getStatsDesc().getTableName());
Assert.assertTrue(statsFromDB.getStatsDesc().isIsTblLevel());
Assert.assertEquals(1,statsFromDB.getStatsObjSize());
ColumnStatisticsObj objFromDB=statsFromDB.getStatsObj().get(0);
ColumnStatisticsData dataFromDB=objFromDB.getStatsData();
Assert.assertEquals(ColumnStatisticsData._Fields.LONG_STATS,dataFromDB.getSetField());
LongColumnStatsData longDataFromDB=dataFromDB.getLongStats();
Assert.assertEquals(longData.getHighValue(),longDataFromDB.getHighValue());
Assert.assertEquals(longData.getLowValue(),longDataFromDB.getLowValue());
Assert.assertEquals(longData.getNumNulls(),longDataFromDB.getNumNulls());
Assert.assertEquals(longData.getNumDVs(),longDataFromDB.getNumDVs());
Assert.assertEquals(longData.getBitVectors(),longDataFromDB.getBitVectors());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * (TestHBaseStoreBitVector) Round-trips table-level double column statistics
 * (high/low within 0.01) and additionally verifies the bit-vector payload
 * survives the store round trip.
 */
@Test public void doubleTableStatistics() throws Exception {
createMockTable(DOUBLE_COL,DOUBLE_TYPE);
ColumnStatistics stats=new ColumnStatistics();
ColumnStatisticsDesc desc=getMockTblColStatsDesc();
stats.setStatsDesc(desc);
ColumnStatisticsObj obj=doubleColStatsObjs.get(0);
DoubleColumnStatsData doubleData=obj.getStatsData().getDoubleStats();
stats.addToStatsObj(obj);
// Persist, then read back the stats for the single double column.
store.updateTableColumnStatistics(stats);
ColumnStatistics statsFromDB=store.getTableColumnStatistics(DB,TBL,Arrays.asList(DOUBLE_COL));
Assert.assertEquals(desc.getLastAnalyzed(),statsFromDB.getStatsDesc().getLastAnalyzed());
Assert.assertEquals(DB,statsFromDB.getStatsDesc().getDbName());
Assert.assertEquals(TBL,statsFromDB.getStatsDesc().getTableName());
Assert.assertTrue(statsFromDB.getStatsDesc().isIsTblLevel());
Assert.assertEquals(1,statsFromDB.getStatsObjSize());
ColumnStatisticsObj objFromDB=statsFromDB.getStatsObj().get(0);
ColumnStatisticsData dataFromDB=objFromDB.getStatsData();
Assert.assertEquals(ColumnStatisticsData._Fields.DOUBLE_STATS,dataFromDB.getSetField());
DoubleColumnStatsData doubleDataFromDB=dataFromDB.getDoubleStats();
Assert.assertEquals(doubleData.getHighValue(),doubleDataFromDB.getHighValue(),0.01);
Assert.assertEquals(doubleData.getLowValue(),doubleDataFromDB.getLowValue(),0.01);
Assert.assertEquals(doubleData.getNumNulls(),doubleDataFromDB.getNumNulls());
Assert.assertEquals(doubleData.getNumDVs(),doubleDataFromDB.getNumDVs());
Assert.assertEquals(doubleData.getBitVectors(),doubleDataFromDB.getBitVectors());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * (TestHBaseStoreBitVector) Round-trips table-level decimal column
 * statistics and additionally verifies the bit-vector payload survives the
 * store round trip.
 */
@Test public void decimalTableStatistics() throws Exception {
createMockTable(DECIMAL_COL,DECIMAL_TYPE);
ColumnStatistics stats=new ColumnStatistics();
ColumnStatisticsDesc desc=getMockTblColStatsDesc();
stats.setStatsDesc(desc);
ColumnStatisticsObj obj=decimalColStatsObjs.get(0);
DecimalColumnStatsData decimalData=obj.getStatsData().getDecimalStats();
stats.addToStatsObj(obj);
// Persist, then read back the stats for the single decimal column.
store.updateTableColumnStatistics(stats);
ColumnStatistics statsFromDB=store.getTableColumnStatistics(DB,TBL,Arrays.asList(DECIMAL_COL));
Assert.assertEquals(desc.getLastAnalyzed(),statsFromDB.getStatsDesc().getLastAnalyzed());
Assert.assertEquals(DB,statsFromDB.getStatsDesc().getDbName());
Assert.assertEquals(TBL,statsFromDB.getStatsDesc().getTableName());
Assert.assertTrue(statsFromDB.getStatsDesc().isIsTblLevel());
Assert.assertEquals(1,statsFromDB.getStatsObjSize());
ColumnStatisticsObj objFromDB=statsFromDB.getStatsObj().get(0);
ColumnStatisticsData dataFromDB=objFromDB.getStatsData();
Assert.assertEquals(ColumnStatisticsData._Fields.DECIMAL_STATS,dataFromDB.getSetField());
DecimalColumnStatsData decimalDataFromDB=dataFromDB.getDecimalStats();
Assert.assertEquals(decimalData.getHighValue(),decimalDataFromDB.getHighValue());
Assert.assertEquals(decimalData.getLowValue(),decimalDataFromDB.getLowValue());
Assert.assertEquals(decimalData.getNumNulls(),decimalDataFromDB.getNumNulls());
Assert.assertEquals(decimalData.getNumDVs(),decimalDataFromDB.getNumDVs());
Assert.assertEquals(decimalData.getBitVectors(),decimalDataFromDB.getBitVectors());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * (TestHBaseStoreBitVector) Round-trips partition-level long column
 * statistics and additionally verifies the bit-vector (NDV sketch) payload
 * survives the store round trip.
 */
@Test public void longPartitionStatistics() throws Exception {
createMockTableAndPartition(INT_TYPE,INT_VAL);
ColumnStatistics stats=new ColumnStatistics();
ColumnStatisticsDesc desc=getMockPartColStatsDesc(PART_KEY,INT_VAL);
stats.setStatsDesc(desc);
ColumnStatisticsObj obj=longColStatsObjs.get(0);
LongColumnStatsData longData=obj.getStatsData().getLongStats();
stats.addToStatsObj(obj);
// Persist the stats against the single partition value.
List parVals=new ArrayList();
parVals.add(INT_VAL);
store.updatePartitionColumnStatistics(stats,parVals);
// Query back by partition name and column name.
List partNames=new ArrayList();
partNames.add(desc.getPartName());
List colNames=new ArrayList();
colNames.add(obj.getColName());
List statsFromDB=store.getPartitionColumnStatistics(DB,TBL,partNames,colNames);
Assert.assertEquals(1,statsFromDB.size());
Assert.assertEquals(desc.getLastAnalyzed(),statsFromDB.get(0).getStatsDesc().getLastAnalyzed());
Assert.assertEquals(DB,statsFromDB.get(0).getStatsDesc().getDbName());
Assert.assertEquals(TBL,statsFromDB.get(0).getStatsDesc().getTableName());
Assert.assertFalse(statsFromDB.get(0).getStatsDesc().isIsTblLevel());
Assert.assertEquals(1,statsFromDB.get(0).getStatsObjSize());
ColumnStatisticsObj objFromDB=statsFromDB.get(0).getStatsObj().get(0);
ColumnStatisticsData dataFromDB=objFromDB.getStatsData();
Assert.assertEquals(ColumnStatisticsData._Fields.LONG_STATS,dataFromDB.getSetField());
LongColumnStatsData longDataFromDB=dataFromDB.getLongStats();
Assert.assertEquals(longData.getHighValue(),longDataFromDB.getHighValue());
Assert.assertEquals(longData.getLowValue(),longDataFromDB.getLowValue());
Assert.assertEquals(longData.getNumNulls(),longDataFromDB.getNumNulls());
Assert.assertEquals(longData.getNumDVs(),longDataFromDB.getNumDVs());
Assert.assertEquals(longData.getBitVectors(),longDataFromDB.getBitVectors());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Writes string-column table statistics to the store, reads them back, and
 * checks that the descriptor and all stats fields match what was written.
 */
@Test public void stringTableStatistics() throws Exception {
  createMockTable(STRING_COL, STRING_TYPE);
  ColumnStatistics written = new ColumnStatistics();
  ColumnStatisticsDesc writtenDesc = getMockTblColStatsDesc();
  written.setStatsDesc(writtenDesc);
  ColumnStatisticsObj statsObj = stringColStatsObjs.get(0);
  StringColumnStatsData writtenData = statsObj.getStatsData().getStringStats();
  written.addToStatsObj(statsObj);
  store.updateTableColumnStatistics(written);
  ColumnStatistics fetched = store.getTableColumnStatistics(DB, TBL, Arrays.asList(STRING_COL));
  // Descriptor must round-trip intact, and table-level flag must be set.
  Assert.assertEquals(writtenDesc.getLastAnalyzed(), fetched.getStatsDesc().getLastAnalyzed());
  Assert.assertEquals(DB, fetched.getStatsDesc().getDbName());
  Assert.assertEquals(TBL, fetched.getStatsDesc().getTableName());
  Assert.assertTrue(fetched.getStatsDesc().isIsTblLevel());
  Assert.assertEquals(1, fetched.getStatsObjSize());
  ColumnStatisticsObj fetchedObj = fetched.getStatsObj().get(0);
  ColumnStatisticsData fetchedData = fetchedObj.getStatsData();
  Assert.assertEquals(ColumnStatisticsData._Fields.STRING_STATS, fetchedData.getSetField());
  StringColumnStatsData readBack = fetchedData.getStringStats();
  Assert.assertEquals(writtenData.getMaxColLen(), readBack.getMaxColLen());
  // Average column length is a double, so compare with a tolerance.
  Assert.assertEquals(writtenData.getAvgColLen(), readBack.getAvgColLen(), 0.01);
  Assert.assertEquals(writtenData.getNumNulls(), readBack.getNumNulls());
  Assert.assertEquals(writtenData.getNumDVs(), readBack.getNumDVs());
  Assert.assertEquals(writtenData.getBitVectors(), readBack.getBitVectors());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Round-trips double-column partition statistics through the store and
 * verifies every stats field survives the write/read cycle.
 */
@Test public void doublePartitionStatistics() throws Exception {
  createMockTableAndPartition(DOUBLE_TYPE, DOUBLE_VAL);
  ColumnStatistics stats = new ColumnStatistics();
  ColumnStatisticsDesc desc = getMockPartColStatsDesc(PART_KEY, DOUBLE_VAL);
  stats.setStatsDesc(desc);
  ColumnStatisticsObj obj = doubleColStatsObjs.get(0);
  // Keep a handle on the written data so we can compare it with what comes back.
  DoubleColumnStatsData doubleData = obj.getStatsData().getDoubleStats();
  stats.addToStatsObj(obj);
  List<String> parVals = new ArrayList<>();
  parVals.add(DOUBLE_VAL);
  store.updatePartitionColumnStatistics(stats, parVals);
  List<String> partNames = new ArrayList<>();
  partNames.add(desc.getPartName());
  List<String> colNames = new ArrayList<>();
  colNames.add(obj.getColName());
  List<ColumnStatistics> statsFromDB =
      store.getPartitionColumnStatistics(DB, TBL, partNames, colNames);
  Assert.assertEquals(1, statsFromDB.size());
  Assert.assertEquals(desc.getLastAnalyzed(), statsFromDB.get(0).getStatsDesc().getLastAnalyzed());
  Assert.assertEquals(DB, statsFromDB.get(0).getStatsDesc().getDbName());
  Assert.assertEquals(TBL, statsFromDB.get(0).getStatsDesc().getTableName());
  // Partition-level stats, so the table-level flag must be false.
  Assert.assertFalse(statsFromDB.get(0).getStatsDesc().isIsTblLevel());
  Assert.assertEquals(1, statsFromDB.get(0).getStatsObjSize());
  ColumnStatisticsObj objFromDB = statsFromDB.get(0).getStatsObj().get(0);
  ColumnStatisticsData dataFromDB = objFromDB.getStatsData();
  Assert.assertEquals(ColumnStatisticsData._Fields.DOUBLE_STATS, dataFromDB.getSetField());
  DoubleColumnStatsData doubleDataFromDB = dataFromDB.getDoubleStats();
  // High/low values are doubles, so compare with a tolerance.
  Assert.assertEquals(doubleData.getHighValue(), doubleDataFromDB.getHighValue(), 0.01);
  Assert.assertEquals(doubleData.getLowValue(), doubleDataFromDB.getLowValue(), 0.01);
  Assert.assertEquals(doubleData.getNumNulls(), doubleDataFromDB.getNumNulls());
  Assert.assertEquals(doubleData.getNumDVs(), doubleDataFromDB.getNumDVs());
  Assert.assertEquals(doubleData.getBitVectors(), doubleDataFromDB.getBitVectors());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Round-trips string-column partition statistics through the store and
 * verifies every stats field survives the write/read cycle.
 */
@Test public void stringPartitionStatistics() throws Exception {
  createMockTableAndPartition(STRING_TYPE, STRING_VAL);
  ColumnStatistics stats = new ColumnStatistics();
  ColumnStatisticsDesc desc = getMockPartColStatsDesc(PART_KEY, STRING_VAL);
  stats.setStatsDesc(desc);
  ColumnStatisticsObj obj = stringColStatsObjs.get(0);
  // Keep a handle on the written data so we can compare it with what comes back.
  StringColumnStatsData stringData = obj.getStatsData().getStringStats();
  stats.addToStatsObj(obj);
  List<String> parVals = new ArrayList<>();
  parVals.add(STRING_VAL);
  store.updatePartitionColumnStatistics(stats, parVals);
  List<String> partNames = new ArrayList<>();
  partNames.add(desc.getPartName());
  List<String> colNames = new ArrayList<>();
  colNames.add(obj.getColName());
  List<ColumnStatistics> statsFromDB =
      store.getPartitionColumnStatistics(DB, TBL, partNames, colNames);
  Assert.assertEquals(1, statsFromDB.size());
  Assert.assertEquals(desc.getLastAnalyzed(), statsFromDB.get(0).getStatsDesc().getLastAnalyzed());
  Assert.assertEquals(DB, statsFromDB.get(0).getStatsDesc().getDbName());
  Assert.assertEquals(TBL, statsFromDB.get(0).getStatsDesc().getTableName());
  // Partition-level stats, so the table-level flag must be false.
  Assert.assertFalse(statsFromDB.get(0).getStatsDesc().isIsTblLevel());
  Assert.assertEquals(1, statsFromDB.get(0).getStatsObjSize());
  ColumnStatisticsObj objFromDB = statsFromDB.get(0).getStatsObj().get(0);
  ColumnStatisticsData dataFromDB = objFromDB.getStatsData();
  Assert.assertEquals(ColumnStatisticsData._Fields.STRING_STATS, dataFromDB.getSetField());
  StringColumnStatsData stringDataFromDB = dataFromDB.getStringStats();
  Assert.assertEquals(stringData.getMaxColLen(), stringDataFromDB.getMaxColLen());
  // Average column length is a double, so compare with a tolerance.
  Assert.assertEquals(stringData.getAvgColLen(), stringDataFromDB.getAvgColLen(), 0.01);
  Assert.assertEquals(stringData.getNumNulls(), stringDataFromDB.getNumNulls());
  Assert.assertEquals(stringData.getNumDVs(), stringDataFromDB.getNumDVs());
  Assert.assertEquals(stringData.getBitVectors(), stringDataFromDB.getBitVectors());
}
Class: org.apache.hadoop.hive.metastore.hbase.TestHBaseStoreCached InternalCallVerifier EqualityVerifier
/**
 * Creates a partitioned table, adds one partition, and verifies that the
 * partition read back from the store matches everything that was written
 * (storage descriptor, db/table names, partition values).
 */
@Test public void createPartition() throws Exception {
  String dbName = "default";
  String tableName = "myparttable";
  int startTime = (int) (System.currentTimeMillis() / 1000);
  List<FieldSchema> cols = new ArrayList<>();
  cols.add(new FieldSchema("col1", "int", "nocomment"));
  SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
  StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output",
      false, 0, serde, null, null, emptyParameters);
  List<FieldSchema> partCols = new ArrayList<>();
  partCols.add(new FieldSchema("pc", "string", ""));
  Table table = new Table(tableName, dbName, "me", startTime, startTime, 0, sd, partCols,
      emptyParameters, null, null, null);
  store.createTable(table);
  List<String> vals = Arrays.asList("fred");
  // Partition gets a copy of the table SD with its own location.
  StorageDescriptor psd = new StorageDescriptor(sd);
  psd.setLocation("file:/tmp/pc=fred");
  Partition part = new Partition(vals, dbName, tableName, startTime, startTime, psd,
      emptyParameters);
  store.addPartition(part);
  Partition p = store.getPartition(dbName, tableName, vals);
  Assert.assertEquals(1, p.getSd().getColsSize());
  Assert.assertEquals("col1", p.getSd().getCols().get(0).getName());
  Assert.assertEquals("int", p.getSd().getCols().get(0).getType());
  Assert.assertEquals("nocomment", p.getSd().getCols().get(0).getComment());
  Assert.assertEquals("serde", p.getSd().getSerdeInfo().getName());
  Assert.assertEquals("seriallib", p.getSd().getSerdeInfo().getSerializationLib());
  Assert.assertEquals("file:/tmp/pc=fred", p.getSd().getLocation());
  Assert.assertEquals("input", p.getSd().getInputFormat());
  Assert.assertEquals("output", p.getSd().getOutputFormat());
  Assert.assertEquals(dbName, p.getDbName());
  Assert.assertEquals(tableName, p.getTableName());
  Assert.assertEquals(1, p.getValuesSize());
  Assert.assertEquals("fred", p.getValues().get(0));
}
InternalCallVerifier EqualityVerifier
/**
 * Creates an unpartitioned table and verifies that the table read back from
 * the store matches everything that was written.
 */
@Test public void createTable() throws Exception {
  String tableName = "mytable";
  int startTime = (int) (System.currentTimeMillis() / 1000);
  List<FieldSchema> cols = new ArrayList<>();
  cols.add(new FieldSchema("col1", "int", "nocomment"));
  SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
  StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output",
      false, 0, serde, null, null, emptyParameters);
  Table table = new Table(tableName, "default", "me", startTime, startTime, 0, sd, null,
      emptyParameters, null, null, null);
  store.createTable(table);
  Table t = store.getTable("default", tableName);
  Assert.assertEquals(1, t.getSd().getColsSize());
  Assert.assertEquals("col1", t.getSd().getCols().get(0).getName());
  Assert.assertEquals("int", t.getSd().getCols().get(0).getType());
  Assert.assertEquals("nocomment", t.getSd().getCols().get(0).getComment());
  Assert.assertEquals("serde", t.getSd().getSerdeInfo().getName());
  Assert.assertEquals("seriallib", t.getSd().getSerdeInfo().getSerializationLib());
  Assert.assertEquals("file:/tmp", t.getSd().getLocation());
  Assert.assertEquals("input", t.getSd().getInputFormat());
  Assert.assertEquals("output", t.getSd().getOutputFormat());
  Assert.assertEquals("me", t.getOwner());
  Assert.assertEquals("default", t.getDbName());
  Assert.assertEquals(tableName, t.getTableName());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Exercises the partition-name lifecycle on a two-key partitioned table:
 * list names, fetch partitions by name, drop by name, and confirm none remain.
 */
@Test public void listGetDropPartitionNames() throws Exception {
  String dbName = "default";
  String tableName = "listParts";
  int startTime = (int) (System.currentTimeMillis() / 1000);
  List<FieldSchema> cols = new ArrayList<>();
  cols.add(new FieldSchema("col1", "int", "nocomment"));
  SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
  StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output",
      false, 0, serde, null, null, emptyParameters);
  List<FieldSchema> partCols = new ArrayList<>();
  partCols.add(new FieldSchema("pc", "string", ""));
  partCols.add(new FieldSchema("region", "string", ""));
  Table table = new Table(tableName, dbName, "me", startTime, startTime, 0, sd, partCols,
      emptyParameters, null, null, null);
  store.createTable(table);
  String[][] partVals = new String[][]{{"today", "north america"}, {"tomorrow", "europe"}};
  for (String[] pv : partVals) {
    List<String> vals = new ArrayList<>();
    for (String v : pv) vals.add(v);
    StorageDescriptor psd = new StorageDescriptor(sd);
    psd.setLocation("file:/tmp/pc=" + pv[0] + "/region=" + pv[1]);
    Partition part = new Partition(vals, dbName, tableName, startTime, startTime, psd,
        emptyParameters);
    store.addPartition(part);
  }
  List<String> names = store.listPartitionNames(dbName, tableName, (short) -1);
  Assert.assertEquals(2, names.size());
  String[] resultNames = names.toArray(new String[names.size()]);
  // Sort so the comparison is independent of the store's return order.
  Arrays.sort(resultNames);
  // assertArrayEquals takes (expected, actual) — expected literal goes first.
  Assert.assertArrayEquals(
      new String[]{"pc=today/region=north america", "pc=tomorrow/region=europe"}, resultNames);
  List<Partition> parts = store.getPartitionsByNames(dbName, tableName, names);
  Assert.assertArrayEquals(partVals[0], parts.get(0).getValues().toArray(new String[2]));
  Assert.assertArrayEquals(partVals[1], parts.get(1).getValues().toArray(new String[2]));
  store.dropPartitions(dbName, tableName, names);
  List<Partition> afterDropParts = store.getPartitions(dbName, tableName, -1);
  Assert.assertEquals(0, afterDropParts.size());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Writes boolean-column table statistics to the store, reads them back, and
 * verifies the descriptor and all boolean stats fields round-trip intact.
 */
@Test public void booleanTableStatistics() throws Exception {
  long now = System.currentTimeMillis();
  String dbname = "default";
  String tableName = "statstable";
  String boolcol = "boolcol";
  int startTime = (int) (System.currentTimeMillis() / 1000);
  List<FieldSchema> cols = new ArrayList<>();
  cols.add(new FieldSchema(boolcol, "boolean", "nocomment"));
  SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
  StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output",
      false, 0, serde, null, null, emptyParameters);
  Table table = new Table(tableName, dbname, "me", startTime, startTime, 0, sd, null,
      emptyParameters, null, null, null);
  store.createTable(table);
  long trues = 37;
  long falses = 12;
  long booleanNulls = 2;
  ColumnStatistics stats = new ColumnStatistics();
  ColumnStatisticsDesc desc = new ColumnStatisticsDesc();
  desc.setLastAnalyzed(now);
  desc.setDbName(dbname);
  desc.setTableName(tableName);
  desc.setIsTblLevel(true);
  stats.setStatsDesc(desc);
  ColumnStatisticsObj obj = new ColumnStatisticsObj();
  obj.setColName(boolcol);
  obj.setColType("boolean");
  ColumnStatisticsData data = new ColumnStatisticsData();
  BooleanColumnStatsData boolData = new BooleanColumnStatsData();
  boolData.setNumTrues(trues);
  boolData.setNumFalses(falses);
  boolData.setNumNulls(booleanNulls);
  data.setBooleanStats(boolData);
  obj.setStatsData(data);
  stats.addToStatsObj(obj);
  store.updateTableColumnStatistics(stats);
  stats = store.getTableColumnStatistics(dbname, tableName, Arrays.asList(boolcol));
  Assert.assertEquals(now, stats.getStatsDesc().getLastAnalyzed());
  Assert.assertEquals(dbname, stats.getStatsDesc().getDbName());
  Assert.assertEquals(tableName, stats.getStatsDesc().getTableName());
  Assert.assertTrue(stats.getStatsDesc().isIsTblLevel());
  Assert.assertEquals(1, stats.getStatsObjSize());
  // BUG FIX: the original read colData from the locally-built 'obj', which made
  // these assertions compare the written data with itself. Read the data from
  // the ColumnStatistics actually fetched from the store.
  ColumnStatisticsData colData = stats.getStatsObj().get(0).getStatsData();
  Assert.assertEquals(ColumnStatisticsData._Fields.BOOLEAN_STATS, colData.getSetField());
  boolData = colData.getBooleanStats();
  Assert.assertEquals(trues, boolData.getNumTrues());
  Assert.assertEquals(falses, boolData.getNumFalses());
  Assert.assertEquals(booleanNulls, boolData.getNumNulls());
}
InternalCallVerifier EqualityVerifier
/**
 * Creates a table, alters its last-access time, and verifies that the altered
 * table read back from the store reflects the change and keeps all other fields.
 */
@Test public void alterTable() throws Exception {
  String tableName = "alttable";
  int startTime = (int) (System.currentTimeMillis() / 1000);
  List<FieldSchema> cols = new ArrayList<>();
  cols.add(new FieldSchema("col1", "int", "nocomment"));
  SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
  StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output",
      false, 0, serde, null, null, emptyParameters);
  Table table = new Table(tableName, "default", "me", startTime, startTime, 0, sd, null,
      emptyParameters, null, null, null);
  store.createTable(table);
  // Bump the access time so the alter is observable.
  startTime += 10;
  table.setLastAccessTime(startTime);
  store.alterTable("default", tableName, table);
  Table t = store.getTable("default", tableName);
  Assert.assertEquals(1, t.getSd().getColsSize());
  Assert.assertEquals("col1", t.getSd().getCols().get(0).getName());
  Assert.assertEquals("int", t.getSd().getCols().get(0).getType());
  Assert.assertEquals("nocomment", t.getSd().getCols().get(0).getComment());
  Assert.assertEquals("serde", t.getSd().getSerdeInfo().getName());
  Assert.assertEquals("seriallib", t.getSd().getSerdeInfo().getSerializationLib());
  Assert.assertEquals("file:/tmp", t.getSd().getLocation());
  Assert.assertEquals("input", t.getSd().getInputFormat());
  Assert.assertEquals("output", t.getSd().getOutputFormat());
  Assert.assertEquals("me", t.getOwner());
  Assert.assertEquals("default", t.getDbName());
  Assert.assertEquals(tableName, t.getTableName());
  Assert.assertEquals(startTime, t.getLastAccessTime());
}
InternalCallVerifier EqualityVerifier
/**
 * Adds five partitions to a table, verifies each is individually fetchable,
 * then checks that getPartitions returns all of them.
 */
@Test public void getPartitions() throws Exception {
  String dbName = "default";
  String tableName = "manyParts";
  int startTime = (int) (System.currentTimeMillis() / 1000);
  List<FieldSchema> cols = new ArrayList<>();
  cols.add(new FieldSchema("col1", "int", "nocomment"));
  SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
  StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output",
      false, 0, serde, null, null, emptyParameters);
  List<FieldSchema> partCols = new ArrayList<>();
  partCols.add(new FieldSchema("pc", "string", ""));
  Table table = new Table(tableName, dbName, "me", startTime, startTime, 0, sd, partCols,
      emptyParameters, null, null, null);
  store.createTable(table);
  List<String> partVals = Arrays.asList("alan", "bob", "carl", "doug", "ethan");
  for (String val : partVals) {
    List<String> vals = new ArrayList<>();
    vals.add(val);
    StorageDescriptor psd = new StorageDescriptor(sd);
    psd.setLocation("file:/tmp/pc=" + val);
    Partition part = new Partition(vals, dbName, tableName, startTime, startTime, psd,
        emptyParameters);
    store.addPartition(part);
    // Each partition should be fetchable immediately after being added.
    Partition p = store.getPartition(dbName, tableName, vals);
    Assert.assertEquals("file:/tmp/pc=" + val, p.getSd().getLocation());
  }
  List<Partition> parts = store.getPartitions(dbName, tableName, -1);
  Assert.assertEquals(5, parts.size());
  String[] pv = new String[5];
  for (int i = 0; i < 5; i++) pv[i] = parts.get(i).getValues().get(0);
  // Sort so the comparison is independent of the store's return order.
  Arrays.sort(pv);
  // assertArrayEquals takes (expected, actual) — expected values go first.
  Assert.assertArrayEquals(partVals.toArray(new String[5]), pv);
}
Class: org.apache.hadoop.hive.metastore.hbase.TestHBaseStoreIntegration APIUtilityVerifier EqualityVerifier
/**
 * Adds five partitions to a table, verifies each is individually fetchable,
 * then checks that getPartitions returns all of them.
 */
@Test public void getPartitions() throws Exception {
  String dbName = "default";
  String tableName = "manyParts";
  int startTime = (int) (System.currentTimeMillis() / 1000);
  List<FieldSchema> cols = new ArrayList<>();
  cols.add(new FieldSchema("col1", "int", "nocomment"));
  SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
  StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output",
      false, 0, serde, null, null, emptyParameters);
  List<FieldSchema> partCols = new ArrayList<>();
  partCols.add(new FieldSchema("pc", "string", ""));
  Table table = new Table(tableName, dbName, "me", startTime, startTime, 0, sd, partCols,
      emptyParameters, null, null, null);
  store.createTable(table);
  List<String> partVals = Arrays.asList("alan", "bob", "carl", "doug", "ethan");
  for (String val : partVals) {
    List<String> vals = new ArrayList<>();
    vals.add(val);
    StorageDescriptor psd = new StorageDescriptor(sd);
    psd.setLocation("file:/tmp/pc=" + val);
    Partition part = new Partition(vals, dbName, tableName, startTime, startTime, psd,
        emptyParameters);
    store.addPartition(part);
    // Each partition should be fetchable immediately after being added.
    Partition p = store.getPartition(dbName, tableName, vals);
    Assert.assertEquals("file:/tmp/pc=" + val, p.getSd().getLocation());
  }
  List<Partition> parts = store.getPartitions(dbName, tableName, -1);
  Assert.assertEquals(5, parts.size());
  String[] pv = new String[5];
  for (int i = 0; i < 5; i++) pv[i] = parts.get(i).getValues().get(0);
  // Sort so the comparison is independent of the store's return order.
  Arrays.sort(pv);
  // assertArrayEquals takes (expected, actual) — expected values go first.
  Assert.assertArrayEquals(partVals.toArray(new String[5]), pv);
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * End-to-end table-statistics test covering all six column stats kinds
 * (boolean, long, double, string, binary, decimal). Writes stats in two
 * batches, verifies partial fetches, then fetches all six columns and
 * checks every field round-trips through the store.
 */
@Test public void tableStatistics() throws Exception {
  long now = System.currentTimeMillis();
  String dbname = "default";
  String tableName = "statstable";
  String boolcol = "boolcol";
  String longcol = "longcol";
  String doublecol = "doublecol";
  String stringcol = "stringcol";
  String binarycol = "bincol";
  String decimalcol = "deccol";
  // Expected values, one group per column type.
  long trues = 37;
  long falses = 12;
  long booleanNulls = 2;
  long longHigh = 120938479124L;
  long longLow = -12341243213412124L;
  long longNulls = 23;
  long longDVs = 213L;
  double doubleHigh = 123423.23423;
  double doubleLow = 0.00001234233;
  long doubleNulls = 92;
  long doubleDVs = 1234123421L;
  long strMaxLen = 1234;
  double strAvgLen = 32.3;
  long strNulls = 987;
  long strDVs = 906;
  long binMaxLen = 123412987L;
  double binAvgLen = 76.98;
  long binNulls = 976998797L;
  Decimal decHigh = new Decimal();
  decHigh.setScale((short) 3);
  decHigh.setUnscaled("3876".getBytes());
  Decimal decLow = new Decimal();
  decLow.setScale((short) 3);
  decLow.setUnscaled("38".getBytes());
  long decNulls = 13;
  long decDVs = 923947293L;
  List<FieldSchema> cols = new ArrayList<>();
  cols.add(new FieldSchema(boolcol, "boolean", "nocomment"));
  cols.add(new FieldSchema(longcol, "long", "nocomment"));
  cols.add(new FieldSchema(doublecol, "double", "nocomment"));
  cols.add(new FieldSchema(stringcol, "varchar(32)", "nocomment"));
  cols.add(new FieldSchema(binarycol, "binary", "nocomment"));
  cols.add(new FieldSchema(decimalcol, "decimal(5, 3)", "nocomment"));
  SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
  StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output",
      false, 0, serde, null, null, emptyParameters);
  Table table = new Table(tableName, dbname, "me", (int) now / 1000, (int) now / 1000, 0, sd,
      null, emptyParameters, null, null, null);
  store.createTable(table);
  // First batch: boolean, long, and double stats.
  ColumnStatistics stats = new ColumnStatistics();
  ColumnStatisticsDesc desc = new ColumnStatisticsDesc();
  desc.setLastAnalyzed(now);
  desc.setDbName(dbname);
  desc.setTableName(tableName);
  desc.setIsTblLevel(true);
  stats.setStatsDesc(desc);
  ColumnStatisticsObj obj = new ColumnStatisticsObj();
  obj.setColName(boolcol);
  obj.setColType("boolean");
  ColumnStatisticsData data = new ColumnStatisticsData();
  BooleanColumnStatsData boolData = new BooleanColumnStatsData();
  boolData.setNumTrues(trues);
  boolData.setNumFalses(falses);
  boolData.setNumNulls(booleanNulls);
  data.setBooleanStats(boolData);
  obj.setStatsData(data);
  stats.addToStatsObj(obj);
  obj = new ColumnStatisticsObj();
  obj.setColName(longcol);
  obj.setColType("long");
  data = new ColumnStatisticsData();
  LongColumnStatsData longData = new LongColumnStatsData();
  longData.setHighValue(longHigh);
  longData.setLowValue(longLow);
  longData.setNumNulls(longNulls);
  longData.setNumDVs(longDVs);
  data.setLongStats(longData);
  obj.setStatsData(data);
  stats.addToStatsObj(obj);
  obj = new ColumnStatisticsObj();
  obj.setColName(doublecol);
  obj.setColType("double");
  data = new ColumnStatisticsData();
  DoubleColumnStatsData doubleData = new DoubleColumnStatsData();
  doubleData.setHighValue(doubleHigh);
  doubleData.setLowValue(doubleLow);
  doubleData.setNumNulls(doubleNulls);
  doubleData.setNumDVs(doubleDVs);
  data.setDoubleStats(doubleData);
  obj.setStatsData(data);
  stats.addToStatsObj(obj);
  store.updateTableColumnStatistics(stats);
  // Partial fetches: three columns, then one.
  stats = store.getTableColumnStatistics(dbname, tableName,
      Arrays.asList(boolcol, longcol, doublecol));
  Assert.assertEquals(3, stats.getStatsObjSize());
  stats = store.getTableColumnStatistics(dbname, tableName, Arrays.asList(boolcol));
  Assert.assertEquals(1, stats.getStatsObjSize());
  // Second batch: string, binary, and decimal stats.
  stats = new ColumnStatistics();
  stats.setStatsDesc(desc);
  obj = new ColumnStatisticsObj();
  obj.setColName(stringcol);
  obj.setColType("string");
  data = new ColumnStatisticsData();
  StringColumnStatsData strData = new StringColumnStatsData();
  strData.setMaxColLen(strMaxLen);
  strData.setAvgColLen(strAvgLen);
  strData.setNumNulls(strNulls);
  strData.setNumDVs(strDVs);
  data.setStringStats(strData);
  obj.setStatsData(data);
  stats.addToStatsObj(obj);
  obj = new ColumnStatisticsObj();
  obj.setColName(binarycol);
  obj.setColType("binary");
  data = new ColumnStatisticsData();
  BinaryColumnStatsData binData = new BinaryColumnStatsData();
  binData.setMaxColLen(binMaxLen);
  binData.setAvgColLen(binAvgLen);
  binData.setNumNulls(binNulls);
  data.setBinaryStats(binData);
  obj.setStatsData(data);
  stats.addToStatsObj(obj);
  obj = new ColumnStatisticsObj();
  obj.setColName(decimalcol);
  obj.setColType("decimal(5,3)");
  data = new ColumnStatisticsData();
  DecimalColumnStatsData decData = new DecimalColumnStatsData();
  LOG.debug("Setting decimal high value to " + decHigh.getScale() + " <"
      + new String(decHigh.getUnscaled()) + ">");
  decData.setHighValue(decHigh);
  decData.setLowValue(decLow);
  decData.setNumNulls(decNulls);
  decData.setNumDVs(decDVs);
  data.setDecimalStats(decData);
  obj.setStatsData(data);
  stats.addToStatsObj(obj);
  store.updateTableColumnStatistics(stats);
  // Fetch all six columns and verify each set of stats in order.
  stats = store.getTableColumnStatistics(dbname, tableName,
      Arrays.asList(boolcol, longcol, doublecol, stringcol, binarycol, decimalcol));
  Assert.assertEquals(now, stats.getStatsDesc().getLastAnalyzed());
  Assert.assertEquals(dbname, stats.getStatsDesc().getDbName());
  Assert.assertEquals(tableName, stats.getStatsDesc().getTableName());
  Assert.assertTrue(stats.getStatsDesc().isIsTblLevel());
  Assert.assertEquals(6, stats.getStatsObjSize());
  ColumnStatisticsData colData = stats.getStatsObj().get(0).getStatsData();
  Assert.assertEquals(ColumnStatisticsData._Fields.BOOLEAN_STATS, colData.getSetField());
  boolData = colData.getBooleanStats();
  Assert.assertEquals(trues, boolData.getNumTrues());
  Assert.assertEquals(falses, boolData.getNumFalses());
  Assert.assertEquals(booleanNulls, boolData.getNumNulls());
  colData = stats.getStatsObj().get(1).getStatsData();
  Assert.assertEquals(ColumnStatisticsData._Fields.LONG_STATS, colData.getSetField());
  longData = colData.getLongStats();
  Assert.assertEquals(longHigh, longData.getHighValue());
  Assert.assertEquals(longLow, longData.getLowValue());
  Assert.assertEquals(longNulls, longData.getNumNulls());
  Assert.assertEquals(longDVs, longData.getNumDVs());
  colData = stats.getStatsObj().get(2).getStatsData();
  Assert.assertEquals(ColumnStatisticsData._Fields.DOUBLE_STATS, colData.getSetField());
  doubleData = colData.getDoubleStats();
  Assert.assertEquals(doubleHigh, doubleData.getHighValue(), 0.01);
  Assert.assertEquals(doubleLow, doubleData.getLowValue(), 0.01);
  Assert.assertEquals(doubleNulls, doubleData.getNumNulls());
  Assert.assertEquals(doubleDVs, doubleData.getNumDVs());
  colData = stats.getStatsObj().get(3).getStatsData();
  Assert.assertEquals(ColumnStatisticsData._Fields.STRING_STATS, colData.getSetField());
  strData = colData.getStringStats();
  Assert.assertEquals(strMaxLen, strData.getMaxColLen());
  Assert.assertEquals(strAvgLen, strData.getAvgColLen(), 0.01);
  Assert.assertEquals(strNulls, strData.getNumNulls());
  Assert.assertEquals(strDVs, strData.getNumDVs());
  colData = stats.getStatsObj().get(4).getStatsData();
  Assert.assertEquals(ColumnStatisticsData._Fields.BINARY_STATS, colData.getSetField());
  binData = colData.getBinaryStats();
  Assert.assertEquals(binMaxLen, binData.getMaxColLen());
  Assert.assertEquals(binAvgLen, binData.getAvgColLen(), 0.01);
  Assert.assertEquals(binNulls, binData.getNumNulls());
  colData = stats.getStatsObj().get(5).getStatsData();
  Assert.assertEquals(ColumnStatisticsData._Fields.DECIMAL_STATS, colData.getSetField());
  decData = colData.getDecimalStats();
  Assert.assertEquals(decHigh, decData.getHighValue());
  Assert.assertEquals(decLow, decData.getLowValue());
  Assert.assertEquals(decNulls, decData.getNumNulls());
  Assert.assertEquals(decDVs, decData.getNumDVs());
}
APIUtilityVerifier EqualityVerifier
/**
 * Verifies listPartitionNamesPs over a two-key (ds, region) partitioned table:
 * full-spec match, prefix match, and wildcard matches on either key.
 */
@Test public void listPartitionsWithPs() throws Exception {
  String dbName = "default";
  String tableName = "listPartitionsWithPs";
  int startTime = (int) (System.currentTimeMillis() / 1000);
  List<FieldSchema> cols = new ArrayList<>();
  cols.add(new FieldSchema("col1", "int", "nocomment"));
  SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
  StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output",
      false, 0, serde, null, null, emptyParameters);
  List<FieldSchema> partCols = new ArrayList<>();
  partCols.add(new FieldSchema("ds", "string", ""));
  partCols.add(new FieldSchema("region", "string", ""));
  Table table = new Table(tableName, dbName, "me", startTime, startTime, 0, sd, partCols,
      emptyParameters, null, null, null);
  store.createTable(table);
  String[][] partVals = new String[][]{{"today", "north america"}, {"today", "europe"},
      {"tomorrow", "north america"}, {"tomorrow", "europe"}};
  for (String[] pv : partVals) {
    List<String> vals = new ArrayList<>();
    for (String v : pv) vals.add(v);
    StorageDescriptor psd = new StorageDescriptor(sd);
    psd.setLocation("file:/tmp/ds=" + pv[0] + "/region=" + pv[1]);
    Partition part = new Partition(vals, dbName, tableName, startTime, startTime, psd,
        emptyParameters);
    store.addPartition(part);
  }
  // Fully-specified partition values should match exactly one partition.
  List<String> partitionNames =
      store.listPartitionNamesPs(dbName, tableName, Arrays.asList(partVals[0]), (short) -1);
  Assert.assertEquals(1, partitionNames.size());
  Assert.assertEquals("ds=today/region=north america", partitionNames.get(0));
  // Prefix (first key only) should match both regions for that day.
  partitionNames =
      store.listPartitionNamesPs(dbName, tableName, Arrays.asList(partVals[0][0]), (short) -1);
  Assert.assertEquals(2, partitionNames.size());
  String[] names = partitionNames.toArray(new String[partitionNames.size()]);
  Arrays.sort(names);
  Assert.assertArrayEquals(
      new String[]{"ds=today/region=europe", "ds=today/region=north america"}, names);
  // Wildcard on the second key behaves like a prefix match.
  partitionNames =
      store.listPartitionNamesPs(dbName, tableName, Arrays.asList("today", "*"), (short) -1);
  Assert.assertEquals(2, partitionNames.size());
  names = partitionNames.toArray(new String[partitionNames.size()]);
  Arrays.sort(names);
  Assert.assertArrayEquals(
      new String[]{"ds=today/region=europe", "ds=today/region=north america"}, names);
  // Wildcard on the first key matches the given region across all days.
  partitionNames =
      store.listPartitionNamesPs(dbName, tableName, Arrays.asList("*", "europe"), (short) -1);
  Assert.assertEquals(2, partitionNames.size());
  names = partitionNames.toArray(new String[partitionNames.size()]);
  Arrays.sort(names);
  Assert.assertArrayEquals(
      new String[]{"ds=today/region=europe", "ds=tomorrow/region=europe"}, names);
}
BranchVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Grants global privileges to one user and one role, then verifies
 * per-principal listing (including principals with no grants) and the
 * list-all view, which should contain exactly the two grants made.
 */
@Test public void listGlobalGrants() throws Exception {
  String[] roleNames = new String[]{"lgg_role1", "lgg_role2"};
  String[] userNames = new String[]{"merry", "pippen"};
  store.addRole(roleNames[0], "me");
  store.addRole(roleNames[1], "me");
  int now = (int) (System.currentTimeMillis() / 1000);
  Role role1 = store.getRole(roleNames[0]);
  Role role2 = store.getRole(roleNames[1]);
  store.grantRole(role1, userNames[0], PrincipalType.USER, "bob", PrincipalType.USER, false);
  store.grantRole(role1, roleNames[1], PrincipalType.ROLE, "admin", PrincipalType.ROLE, true);
  store.grantRole(role2, userNames[1], PrincipalType.USER, "bob", PrincipalType.USER, false);
  List<HiveObjectPrivilege> privileges = new ArrayList<>();
  HiveObjectRef hiveObjRef = new HiveObjectRef(HiveObjectType.GLOBAL, null, null, null, null);
  // "read" for the first user, "write" for the first role.
  PrivilegeGrantInfo grantInfo =
      new PrivilegeGrantInfo("read", now, "me", PrincipalType.USER, false);
  HiveObjectPrivilege hop =
      new HiveObjectPrivilege(hiveObjRef, userNames[0], PrincipalType.USER, grantInfo);
  privileges.add(hop);
  grantInfo = new PrivilegeGrantInfo("write", now, "me", PrincipalType.USER, true);
  hop = new HiveObjectPrivilege(hiveObjRef, roleNames[0], PrincipalType.ROLE, grantInfo);
  privileges.add(hop);
  PrivilegeBag pBag = new PrivilegeBag(privileges);
  store.grantPrivileges(pBag);
  List<HiveObjectPrivilege> hops =
      store.listPrincipalGlobalGrants(roleNames[0], PrincipalType.ROLE);
  Assert.assertEquals(1, hops.size());
  Assert.assertEquals(PrincipalType.ROLE, hops.get(0).getPrincipalType());
  Assert.assertEquals(HiveObjectType.GLOBAL, hops.get(0).getHiveObject().getObjectType());
  Assert.assertEquals("write", hops.get(0).getGrantInfo().getPrivilege());
  hops = store.listPrincipalGlobalGrants(userNames[0], PrincipalType.USER);
  Assert.assertEquals(1, hops.size());
  Assert.assertEquals(PrincipalType.USER, hops.get(0).getPrincipalType());
  Assert.assertEquals(HiveObjectType.GLOBAL, hops.get(0).getHiveObject().getObjectType());
  Assert.assertEquals("read", hops.get(0).getGrantInfo().getPrivilege());
  // Principals that never received a global grant should come back empty.
  hops = store.listPrincipalGlobalGrants(roleNames[1], PrincipalType.ROLE);
  Assert.assertEquals(0, hops.size());
  hops = store.listPrincipalGlobalGrants(userNames[1], PrincipalType.USER);
  Assert.assertEquals(0, hops.size());
  hops = store.listGlobalGrantsAll();
  Assert.assertEquals(2, hops.size());
  // Order of listGlobalGrantsAll is unspecified, so match by principal name.
  boolean sawUser = false, sawRole = false;
  for (HiveObjectPrivilege h : hops) {
    if (h.getPrincipalName().equals(userNames[0])) {
      Assert.assertEquals(PrincipalType.USER, h.getPrincipalType());
      Assert.assertEquals(HiveObjectType.GLOBAL, h.getHiveObject().getObjectType());
      Assert.assertEquals("read", h.getGrantInfo().getPrivilege());
      sawUser = true;
    } else if (h.getPrincipalName().equals(roleNames[0])) {
      Assert.assertEquals(PrincipalType.ROLE, h.getPrincipalType());
      Assert.assertEquals(HiveObjectType.GLOBAL, h.getHiveObject().getObjectType());
      Assert.assertEquals("write", h.getGrantInfo().getPrivilege());
      sawRole = true;
    }
  }
  Assert.assertTrue(sawUser && sawRole);
}
APIUtilityVerifier EqualityVerifier
/**
 * Verifies that dropping a role removes it from the user-to-role map,
 * including transitive membership (role2 was granted into role1).
 */
@Test public void userToRoleMapOnDrop() throws Exception {
  String roleName1 = "utrmod1";
  store.addRole(roleName1, "me");
  String roleName2 = "utrmod2";
  store.addRole(roleName2, "me");
  String user1 = "pebbles";
  String user2 = "bam-bam";
  Role role1 = store.getRole(roleName1);
  Role role2 = store.getRole(roleName2);
  store.grantRole(role1, user1, PrincipalType.USER, "bob", PrincipalType.USER, false);
  // role2 becomes a member of role1, so role1's users transitively hold role2.
  store.grantRole(role1, roleName2, PrincipalType.ROLE, "admin", PrincipalType.ROLE, true);
  store.grantRole(role1, user2, PrincipalType.USER, "bob", PrincipalType.USER, false);
  List<String> roles = HBaseReadWrite.getInstance().getUserRoles(user2);
  Assert.assertEquals(2, roles.size());
  String[] roleNames = roles.toArray(new String[roles.size()]);
  Arrays.sort(roleNames);
  Assert.assertArrayEquals(new String[]{roleName1, roleName2}, roleNames);
  store.removeRole(roleName2);
  HBaseReadWrite.setConf(conf);
  // After the drop, both users should only hold role1.
  roles = HBaseReadWrite.getInstance().getUserRoles(user1);
  Assert.assertEquals(1, roles.size());
  Assert.assertEquals(roleName1, roles.get(0));
  roles = HBaseReadWrite.getInstance().getUserRoles(user2);
  Assert.assertEquals(1, roles.size());
  Assert.assertEquals(roleName1, roles.get(0));
}
APIUtilityVerifier EqualityVerifier
/**
 * Adds a role, confirms it is retrievable, drops it, and expects a
 * NoSuchObjectException on the subsequent lookup.
 */
@Test public void dropRole() throws Exception {
  String roleName = "anotherrole";
  store.addRole(roleName, "me");
  Role created = store.getRole(roleName);
  Assert.assertEquals(roleName, created.getRoleName());
  store.removeRole(roleName);
  // Fetching a removed role must fail.
  thrown.expect(NoSuchObjectException.class);
  store.getRole(roleName);
}
EqualityVerifier NullVerifier HybridVerifier
/**
 * Exercises the delegation-token CRUD surface: add, get, list, and remove
 * (including removal of a token that was never stored).
 */
@Test public void delegationToken() throws Exception {
  store.addToken("abc", "def");
  store.addToken("ghi", "jkl");
  Assert.assertEquals("def", store.getToken("abc"));
  Assert.assertEquals("jkl", store.getToken("ghi"));
  // Unknown identifiers yield null rather than an exception.
  Assert.assertNull(store.getToken("wabawaba"));
  String[] tokenIds = store.getAllTokenIdentifiers().toArray(new String[2]);
  Arrays.sort(tokenIds);
  Assert.assertArrayEquals(new String[]{"abc", "ghi"}, tokenIds);
  store.removeToken("abc");
  // Removing a non-existent token must be a harmless no-op.
  store.removeToken("wabawaba");
  Assert.assertNull(store.getToken("abc"));
  Assert.assertEquals("jkl", store.getToken("ghi"));
  tokenIds = store.getAllTokenIdentifiers().toArray(new String[1]);
  Assert.assertArrayEquals(new String[]{"ghi"}, tokenIds);
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Creates a table, bumps its lastAccessTime via alterTable, and verifies
// that the altered field and all original metadata survive the round trip.
// Raw collection types replaced with generics; behavior unchanged.
@Test public void alterTable() throws Exception {
String tableName="alttable";
int startTime=(int)(System.currentTimeMillis() / 1000);
List<FieldSchema> cols=new ArrayList<>();
cols.add(new FieldSchema("col1","int","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,emptyParameters);
Table table=new Table(tableName,"default","me",startTime,startTime,0,sd,null,emptyParameters,null,null,null);
store.createTable(table);
// Advance the access time so the alter is observable in the read-back.
startTime+=10;
table.setLastAccessTime(startTime);
LOG.debug("XXX alter table test");
store.alterTable("default",tableName,table);
Table t=store.getTable("default",tableName);
LOG.debug("Alter table time " + t.getLastAccessTime());
Assert.assertEquals(1,t.getSd().getColsSize());
Assert.assertEquals("col1",t.getSd().getCols().get(0).getName());
Assert.assertEquals("int",t.getSd().getCols().get(0).getType());
Assert.assertEquals("nocomment",t.getSd().getCols().get(0).getComment());
Assert.assertEquals("serde",t.getSd().getSerdeInfo().getName());
Assert.assertEquals("seriallib",t.getSd().getSerdeInfo().getSerializationLib());
Assert.assertEquals("file:/tmp",t.getSd().getLocation());
Assert.assertEquals("input",t.getSd().getInputFormat());
Assert.assertEquals("output",t.getSd().getOutputFormat());
Assert.assertEquals("me",t.getOwner());
Assert.assertEquals("default",t.getDbName());
Assert.assertEquals(tableName,t.getTableName());
Assert.assertEquals(startTime,t.getLastAccessTime());
}
InternalCallVerifier EqualityVerifier
// Round-trips a Database object through the store and checks its fields.
@Test public void createDb() throws Exception {
String dbname="mydb";
store.createDatabase(new Database(dbname,"no description","file:///tmp",emptyParameters));
Database fetched=store.getDatabase("mydb");
Assert.assertEquals(dbname,fetched.getName());
Assert.assertEquals("no description",fetched.getDescription());
Assert.assertEquals("file:///tmp",fetched.getLocationUri());
}
APIUtilityVerifier EqualityVerifier
// Verifies getDatabases() pattern matching: an alternation that selects a
// subset of databases and a wildcard that matches all of them.
// Raw List replaced with List<String>; behavior unchanged.
@Test public void getDbsRegex() throws Exception {
String[] dbNames=new String[3];
for (int i=0; i < dbNames.length; i++) {
dbNames[i]="db" + i;
Database db=new Database(dbNames[i],"no description","file:///tmp",emptyParameters);
store.createDatabase(db);
}
// "db1|db2" should match exactly the last two databases.
List<String> dbs=store.getDatabases("db1|db2");
Assert.assertEquals(2,dbs.size());
String[] namesFromStore=dbs.toArray(new String[2]);
Arrays.sort(namesFromStore);
Assert.assertArrayEquals(Arrays.copyOfRange(dbNames,1,3),namesFromStore);
// "db*" should match all three.
dbs=store.getDatabases("db*");
Assert.assertEquals(3,dbs.size());
namesFromStore=dbs.toArray(new String[3]);
Arrays.sort(namesFromStore);
Assert.assertArrayEquals(dbNames,namesFromStore);
}
InternalCallVerifier EqualityVerifier
// Creates an unpartitioned table and verifies every piece of metadata
// (columns, serde, storage descriptor, ownership) read back from the store.
// Raw collection types replaced with generics; behavior unchanged.
@Test public void createTable() throws Exception {
int startTime=(int)(System.currentTimeMillis() / 1000);
List<FieldSchema> cols=new ArrayList<>();
cols.add(new FieldSchema("col1","int","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,emptyParameters);
Table table=new Table("mytable","default","me",startTime,startTime,0,sd,null,emptyParameters,null,null,null);
store.createTable(table);
Table t=store.getTable("default","mytable");
Assert.assertEquals(1,t.getSd().getColsSize());
Assert.assertEquals("col1",t.getSd().getCols().get(0).getName());
Assert.assertEquals("int",t.getSd().getCols().get(0).getType());
Assert.assertEquals("nocomment",t.getSd().getCols().get(0).getComment());
Assert.assertEquals("serde",t.getSd().getSerdeInfo().getName());
Assert.assertEquals("seriallib",t.getSd().getSerdeInfo().getSerializationLib());
Assert.assertEquals("file:/tmp",t.getSd().getLocation());
Assert.assertEquals("input",t.getSd().getInputFormat());
Assert.assertEquals("output",t.getSd().getOutputFormat());
Assert.assertEquals("me",t.getOwner());
Assert.assertEquals("default",t.getDbName());
Assert.assertEquals("mytable",t.getTableName());
}
APIUtilityVerifier EqualityVerifier
// Verifies getFunctions() pattern matching: alternation, wildcard, and a
// lookup against a nonexistent database (which must return an empty list).
// Raw List replaced with List<String>; behavior unchanged.
@Test public void getFuncsRegex() throws Exception {
String dbname="default";
int now=(int)(System.currentTimeMillis() / 1000);
String[] funcNames=new String[3];
for (int i=0; i < funcNames.length; i++) {
funcNames[i]="func" + i;
store.createFunction(new Function(funcNames[i],dbname,"o.a.h.h.myfunc","me",PrincipalType.USER,now,FunctionType.JAVA,Arrays.asList(new ResourceUri(ResourceType.JAR,"file:/tmp/somewhere"))));
}
// Alternation selects the last two functions.
List<String> funcs=store.getFunctions(dbname,"func1|func2");
Assert.assertEquals(2,funcs.size());
String[] namesFromStore=funcs.toArray(new String[2]);
Arrays.sort(namesFromStore);
Assert.assertArrayEquals(Arrays.copyOfRange(funcNames,1,3),namesFromStore);
// Wildcard selects all three.
funcs=store.getFunctions(dbname,"func*");
Assert.assertEquals(3,funcs.size());
namesFromStore=funcs.toArray(new String[3]);
Arrays.sort(namesFromStore);
Assert.assertArrayEquals(funcNames,namesFromStore);
// Unknown database yields an empty result, not an error.
funcs=store.getFunctions("nosuchdb","func*");
Assert.assertEquals(0,funcs.size());
}
APIUtilityVerifier BranchVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Exercises table-level privilege listing: grants "read" to a user and
// "write" to a role on one table, then verifies the various listing APIs
// (per-principal, per-table, and the *All variants) return exactly the
// expected grants and nothing for unrelated principals/tables.
@Test public void listTableGrants() throws Exception {
String dbName="ltg_db";
String[] tableNames=new String[]{"ltg_t1","ltg_t2"};
try {
// Set up a database with two tables; only the first receives grants.
Database db=new Database(dbName,"no description","file:///tmp",emptyParameters);
store.createDatabase(db);
int startTime=(int)(System.currentTimeMillis() / 1000);
List cols=new ArrayList();
cols.add(new FieldSchema("col1","int","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,emptyParameters);
Table table=new Table(tableNames[0],dbName,"me",startTime,startTime,0,sd,null,emptyParameters,null,null,null);
store.createTable(table);
table=new Table(tableNames[1],dbName,"me",startTime,startTime,0,sd,null,emptyParameters,null,null,null);
store.createTable(table);
// Two roles and two users; role1 gets user0 and role2 as members.
String[] roleNames=new String[]{"ltg_role1","ltg_role2"};
String[] userNames=new String[]{"gandalf","radagast"};
store.addRole(roleNames[0],"me");
store.addRole(roleNames[1],"me");
int now=(int)(System.currentTimeMillis() / 1000);
Role role1=store.getRole(roleNames[0]);
Role role2=store.getRole(roleNames[1]);
store.grantRole(role1,userNames[0],PrincipalType.USER,"bob",PrincipalType.USER,false);
store.grantRole(role1,roleNames[1],PrincipalType.ROLE,"admin",PrincipalType.ROLE,true);
store.grantRole(role2,userNames[1],PrincipalType.USER,"bob",PrincipalType.USER,false);
// Grant "read" to user0 and "write" (with grant option) to role1 on table0.
List privileges=new ArrayList();
HiveObjectRef hiveObjRef=new HiveObjectRef(HiveObjectType.TABLE,dbName,tableNames[0],null,null);
PrivilegeGrantInfo grantInfo=new PrivilegeGrantInfo("read",now,"me",PrincipalType.USER,false);
HiveObjectPrivilege hop=new HiveObjectPrivilege(hiveObjRef,userNames[0],PrincipalType.USER,grantInfo);
privileges.add(hop);
grantInfo=new PrivilegeGrantInfo("write",now,"me",PrincipalType.USER,true);
hop=new HiveObjectPrivilege(hiveObjRef,roleNames[0],PrincipalType.ROLE,grantInfo);
privileges.add(hop);
PrivilegeBag pBag=new PrivilegeBag(privileges);
store.grantPrivileges(pBag);
// Per-principal, per-table listing returns exactly the matching grant.
List hops=store.listAllTableGrants(roleNames[0],PrincipalType.ROLE,dbName,tableNames[0]);
Assert.assertEquals(1,hops.size());
Assert.assertEquals(PrincipalType.ROLE,hops.get(0).getPrincipalType());
Assert.assertEquals(HiveObjectType.TABLE,hops.get(0).getHiveObject().getObjectType());
Assert.assertEquals("write",hops.get(0).getGrantInfo().getPrivilege());
hops=store.listAllTableGrants(userNames[0],PrincipalType.USER,dbName,tableNames[0]);
Assert.assertEquals(1,hops.size());
Assert.assertEquals(PrincipalType.USER,hops.get(0).getPrincipalType());
Assert.assertEquals(HiveObjectType.TABLE,hops.get(0).getHiveObject().getObjectType());
Assert.assertEquals("read",hops.get(0).getGrantInfo().getPrivilege());
// Principals and tables that received no direct grant come back empty.
hops=store.listAllTableGrants(roleNames[1],PrincipalType.ROLE,dbName,tableNames[0]);
Assert.assertEquals(0,hops.size());
hops=store.listAllTableGrants(userNames[1],PrincipalType.USER,dbName,tableNames[0]);
Assert.assertEquals(0,hops.size());
hops=store.listAllTableGrants(roleNames[0],PrincipalType.ROLE,dbName,tableNames[1]);
Assert.assertEquals(0,hops.size());
hops=store.listAllTableGrants(userNames[0],PrincipalType.USER,dbName,tableNames[1]);
Assert.assertEquals(0,hops.size());
// listTableGrantsAll returns both grants; order is unspecified, so scan.
hops=store.listTableGrantsAll(dbName,tableNames[0]);
Assert.assertEquals(2,hops.size());
boolean sawUser=false, sawRole=false;
for ( HiveObjectPrivilege h : hops) {
if (h.getPrincipalName().equals(userNames[0])) {
Assert.assertEquals(PrincipalType.USER,h.getPrincipalType());
Assert.assertEquals(HiveObjectType.TABLE,h.getHiveObject().getObjectType());
Assert.assertEquals("read",h.getGrantInfo().getPrivilege());
sawUser=true;
}
else if (h.getPrincipalName().equals(roleNames[0])) {
Assert.assertEquals(PrincipalType.ROLE,h.getPrincipalType());
Assert.assertEquals(HiveObjectType.TABLE,h.getHiveObject().getObjectType());
Assert.assertEquals("write",h.getGrantInfo().getPrivilege());
sawRole=true;
}
}
Assert.assertTrue(sawUser && sawRole);
// listPrincipalTableGrantsAll filters by principal across all tables.
hops=store.listPrincipalTableGrantsAll(roleNames[0],PrincipalType.ROLE);
Assert.assertEquals(1,hops.size());
Assert.assertEquals(PrincipalType.ROLE,hops.get(0).getPrincipalType());
Assert.assertEquals(HiveObjectType.TABLE,hops.get(0).getHiveObject().getObjectType());
Assert.assertEquals("write",hops.get(0).getGrantInfo().getPrivilege());
hops=store.listPrincipalTableGrantsAll(userNames[0],PrincipalType.USER);
Assert.assertEquals(1,hops.size());
Assert.assertEquals(PrincipalType.USER,hops.get(0).getPrincipalType());
Assert.assertEquals(HiveObjectType.TABLE,hops.get(0).getHiveObject().getObjectType());
Assert.assertEquals("read",hops.get(0).getGrantInfo().getPrivilege());
// NOTE(review): these two calls query DB grants, not table grants —
// presumably intentional negative checks, but verify against intent.
hops=store.listPrincipalDBGrantsAll(roleNames[1],PrincipalType.ROLE);
Assert.assertEquals(0,hops.size());
hops=store.listPrincipalDBGrantsAll(userNames[1],PrincipalType.USER);
Assert.assertEquals(0,hops.size());
}
finally {
// Clean up so other tests see a pristine store.
store.dropTable(dbName,tableNames[0]);
store.dropTable(dbName,tableNames[1]);
store.dropDatabase(dbName);
}
}
APIUtilityVerifier EqualityVerifier
// Creates a partitioned table, bulk-adds five partitions via
// addPartitions(), and verifies listPartitionNames returns all of them
// with the expected "pc=<val>" names.
// Raw collection types replaced with generics; behavior unchanged.
@Test public void addPartitions() throws Exception {
String dbName="default";
String tableName="addParts";
int startTime=(int)(System.currentTimeMillis() / 1000);
List<FieldSchema> cols=new ArrayList<>();
cols.add(new FieldSchema("col1","int","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,emptyParameters);
List<FieldSchema> partCols=new ArrayList<>();
partCols.add(new FieldSchema("pc","string",""));
Table table=new Table(tableName,dbName,"me",startTime,startTime,0,sd,partCols,emptyParameters,null,null,null);
store.createTable(table);
List<String> partVals=Arrays.asList("alan","bob","carl","doug","ethan");
List<Partition> partitions=new ArrayList<>();
for ( String val : partVals) {
List<String> vals=new ArrayList<>();
vals.add(val);
// Each partition gets its own storage descriptor with a distinct location.
StorageDescriptor psd=new StorageDescriptor(sd);
psd.setLocation("file:/tmp/pc=" + val);
Partition part=new Partition(vals,dbName,tableName,startTime,startTime,psd,emptyParameters);
partitions.add(part);
}
store.addPartitions(dbName,tableName,partitions);
List<String> partNames=store.listPartitionNames(dbName,tableName,(short)-1);
Assert.assertEquals(5,partNames.size());
String[] names=partNames.toArray(new String[partNames.size()]);
Arrays.sort(names);
String[] canonicalNames=partVals.toArray(new String[partVals.size()]);
for (int i=0; i < canonicalNames.length; i++) canonicalNames[i]="pc=" + canonicalNames[i];
Assert.assertArrayEquals(canonicalNames,names);
}
APIUtilityVerifier EqualityVerifier
// Creates three tables in each of two databases and verifies
// getAllTables, getTables (pattern match), and getTableObjectsByName.
// Raw collection types replaced with generics; behavior unchanged.
@Test public void getAllTables() throws Exception {
String dbNames[]=new String[]{"db0","db1"};
String tableNames[]=new String[]{"curly","larry","moe"};
for (int i=0; i < dbNames.length; i++) {
store.createDatabase(new Database(dbNames[i],"no description","file:///tmp",emptyParameters));
}
for (int i=0; i < dbNames.length; i++) {
for (int j=0; j < tableNames.length; j++) {
int startTime=(int)(System.currentTimeMillis() / 1000);
List<FieldSchema> cols=new ArrayList<>();
cols.add(new FieldSchema("col1","int","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,emptyParameters);
Table table=new Table(tableNames[j],dbNames[i],"me",startTime,startTime,0,sd,null,emptyParameters,null,null,null);
store.createTable(table);
}
}
// All three tables come back for db0.
List<String> fetchedNames=store.getAllTables(dbNames[0]);
Assert.assertEquals(3,fetchedNames.size());
String[] sortedFetchedNames=fetchedNames.toArray(new String[fetchedNames.size()]);
Arrays.sort(sortedFetchedNames);
Assert.assertArrayEquals(tableNames,sortedFetchedNames);
// Pattern "*y" matches "curly" and "larry" but not "moe".
List<String> regexNames=store.getTables(dbNames[0],"*y");
Assert.assertEquals(2,regexNames.size());
String[] sortedRegexNames=regexNames.toArray(new String[regexNames.size()]);
Arrays.sort(sortedRegexNames);
Assert.assertArrayEquals(Arrays.copyOfRange(tableNames,0,2),sortedRegexNames);
// Fetching full Table objects by name from the second database.
List<Table> fetchedTables=store.getTableObjectsByName(dbNames[1],Arrays.asList(Arrays.copyOfRange(tableNames,1,3)));
Assert.assertEquals(2,fetchedTables.size());
sortedFetchedNames=new String[fetchedTables.size()];
for (int i=0; i < fetchedTables.size(); i++) {
sortedFetchedNames[i]=fetchedTables.get(i).getTableName();
}
Arrays.sort(sortedFetchedNames);
Assert.assertArrayEquals(Arrays.copyOfRange(tableNames,1,3),sortedFetchedNames);
}
APIUtilityVerifier IterativeVerifier EqualityVerifier
// Writes boolean and then string column statistics for two partitions and
// verifies getPartitionColumnStatistics returns the right number of stats
// objects per partition along with the stored values.
// FIX: the original computed partition create times as "(int)now / 1000" —
// the cast binds tighter than the division, so the long millis value was
// truncated to int (overflowing) BEFORE dividing. Corrected to
// "(int)(now / 1000)", matching every sibling test. Raw collection types
// also replaced with generics.
@Test public void partitionStatistics() throws Exception {
long now=System.currentTimeMillis();
String dbname="default";
String tableName="statspart";
String[] partNames={"ds=today","ds=yesterday"};
String[] partVals={"today","yesterday"};
String boolcol="boolcol";
String longcol="longcol";
String doublecol="doublecol";
String stringcol="stringcol";
String binarycol="bincol";
String decimalcol="deccol";
long trues=37;
long falses=12;
long booleanNulls=2;
long strMaxLen=1234;
double strAvgLen=32.3;
long strNulls=987;
long strDVs=906;
List<FieldSchema> cols=new ArrayList<>();
cols.add(new FieldSchema(boolcol,"boolean","nocomment"));
cols.add(new FieldSchema(longcol,"long","nocomment"));
cols.add(new FieldSchema(doublecol,"double","nocomment"));
cols.add(new FieldSchema(stringcol,"varchar(32)","nocomment"));
cols.add(new FieldSchema(binarycol,"binary","nocomment"));
cols.add(new FieldSchema(decimalcol,"decimal(5, 3)","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,emptyParameters);
List<FieldSchema> partCols=new ArrayList<>();
partCols.add(new FieldSchema("ds","string",""));
Table table=new Table(tableName,dbname,"me",(int)(now / 1000),(int)(now / 1000),0,sd,partCols,emptyParameters,null,null,null);
store.createTable(table);
for ( String partVal : partVals) {
Partition part=new Partition(Arrays.asList(partVal),dbname,tableName,(int)(now / 1000),(int)(now / 1000),sd,emptyParameters);
store.addPartition(part);
}
// First pass: store boolean stats for each partition.
for (int i=0; i < partNames.length; i++) {
ColumnStatistics stats=new ColumnStatistics();
ColumnStatisticsDesc desc=new ColumnStatisticsDesc();
desc.setLastAnalyzed(now);
desc.setDbName(dbname);
desc.setTableName(tableName);
desc.setIsTblLevel(false);
desc.setPartName(partNames[i]);
stats.setStatsDesc(desc);
ColumnStatisticsObj obj=new ColumnStatisticsObj();
obj.setColName(boolcol);
obj.setColType("boolean");
ColumnStatisticsData data=new ColumnStatisticsData();
BooleanColumnStatsData boolData=new BooleanColumnStatsData();
boolData.setNumTrues(trues);
boolData.setNumFalses(falses);
boolData.setNumNulls(booleanNulls);
data.setBooleanStats(boolData);
obj.setStatsData(data);
stats.addToStatsObj(obj);
store.updatePartitionColumnStatistics(stats,Arrays.asList(partVals[i]));
}
List<ColumnStatistics> statsList=store.getPartitionColumnStatistics(dbname,tableName,Arrays.asList(partNames),Arrays.asList(boolcol));
Assert.assertEquals(2,statsList.size());
for (int i=0; i < partNames.length; i++) {
Assert.assertEquals(1,statsList.get(i).getStatsObjSize());
}
// Second pass: add string stats for the same partitions.
for (int i=0; i < partNames.length; i++) {
ColumnStatistics stats=new ColumnStatistics();
ColumnStatisticsDesc desc=new ColumnStatisticsDesc();
desc.setLastAnalyzed(now);
desc.setDbName(dbname);
desc.setTableName(tableName);
desc.setIsTblLevel(false);
desc.setPartName(partNames[i]);
stats.setStatsDesc(desc);
ColumnStatisticsObj obj=new ColumnStatisticsObj();
obj.setColName(stringcol);
obj.setColType("string");
ColumnStatisticsData data=new ColumnStatisticsData();
StringColumnStatsData strData=new StringColumnStatsData();
strData.setMaxColLen(strMaxLen);
strData.setAvgColLen(strAvgLen);
strData.setNumNulls(strNulls);
strData.setNumDVs(strDVs);
data.setStringStats(strData);
obj.setStatsData(data);
stats.addToStatsObj(obj);
store.updatePartitionColumnStatistics(stats,Arrays.asList(partVals[i]));
}
// Querying only the boolean column still returns one stats object each.
statsList=store.getPartitionColumnStatistics(dbname,tableName,Arrays.asList(partNames),Arrays.asList(boolcol));
Assert.assertEquals(2,statsList.size());
for (int i=0; i < partNames.length; i++) {
Assert.assertEquals(1,statsList.get(i).getStatsObjSize());
}
// Querying both columns returns two stats objects per partition with the
// exact values stored above.
statsList=store.getPartitionColumnStatistics(dbname,tableName,Arrays.asList(partNames),Arrays.asList(boolcol,stringcol));
Assert.assertEquals(2,statsList.size());
for (int i=0; i < partNames.length; i++) {
Assert.assertEquals(2,statsList.get(i).getStatsObjSize());
Assert.assertEquals(booleanNulls,statsList.get(i).getStatsObj().get(0).getStatsData().getBooleanStats().getNumNulls());
Assert.assertEquals(strDVs,statsList.get(i).getStatsObj().get(1).getStatsData().getStringStats().getNumDVs());
}
}
EqualityVerifier
@Test public void alterPartitions() throws Exception {
String dbName="default";
String tableName="alterParts";
int startTime=(int)(System.currentTimeMillis() / 1000);
List cols=new ArrayList();
cols.add(new FieldSchema("col1","int","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,emptyParameters);
List partCols=new ArrayList();
partCols.add(new FieldSchema("pc","string",""));
Table table=new Table(tableName,dbName,"me",startTime,startTime,0,sd,partCols,emptyParameters,null,null,null);
store.createTable(table);
List partVals=Arrays.asList("alan","bob","carl","doug","ethan");
List partitions=new ArrayList();
List> allVals=new ArrayList>();
for ( String val : partVals) {
List vals=new ArrayList();
allVals.add(vals);
vals.add(val);
StorageDescriptor psd=new StorageDescriptor(sd);
psd.setLocation("file:/tmp/pc=" + val);
Partition part=new Partition(vals,dbName,tableName,startTime,startTime,psd,emptyParameters);
partitions.add(part);
}
store.addPartitions(dbName,tableName,partitions);
for ( Partition p : partitions) p.setLastAccessTime(startTime + 10);
store.alterPartitions(dbName,tableName,allVals,partitions);
partitions=store.getPartitions(dbName,tableName,-1);
for ( Partition part : partitions) {
Assert.assertEquals(startTime + 10,part.getLastAccessTime());
}
}
APIUtilityVerifier EqualityVerifier
// Verifies the HBase user-to-role map: role1 contains role2, so a user
// granted role1 transitively has both; revoking the nested role grant
// shrinks the map only for users whose membership depended on it.
// Raw List replaced with List<String>; behavior unchanged.
@Test public void userToRoleMap() throws Exception {
String roleName1="utrm1";
store.addRole(roleName1,"me");
String roleName2="utrm2";
store.addRole(roleName2,"me");
String user1="wilma";
String user2="betty";
Role role1=store.getRole(roleName1);
Role role2=store.getRole(roleName2);
store.grantRole(role1,user1,PrincipalType.USER,"bob",PrincipalType.USER,false);
store.grantRole(role1,roleName2,PrincipalType.ROLE,"admin",PrincipalType.ROLE,true);
// user1 has role1 directly and role2 through nesting.
List<String> roles=HBaseReadWrite.getInstance().getUserRoles(user1);
Assert.assertEquals(2,roles.size());
String[] roleNames=roles.toArray(new String[roles.size()]);
Arrays.sort(roleNames);
Assert.assertArrayEquals(new String[]{roleName1,roleName2},roleNames);
store.grantRole(role2,user1,PrincipalType.USER,"admin",PrincipalType.ROLE,false);
store.grantRole(role1,user2,PrincipalType.USER,"bob",PrincipalType.USER,false);
// NOTE(review): setConf appears to reset cached state before re-reading;
// confirm against HBaseReadWrite semantics.
HBaseReadWrite.setConf(conf);
roles=HBaseReadWrite.getInstance().getUserRoles(user2);
Assert.assertEquals(2,roles.size());
roleNames=roles.toArray(new String[roles.size()]);
Arrays.sort(roleNames);
Assert.assertArrayEquals(new String[]{roleName1,roleName2},roleNames);
// Revoke role2's membership in role1: user1 keeps role2 (direct grant),
// user2 loses it (only had it transitively).
store.revokeRole(role1,roleName2,PrincipalType.ROLE,false);
roles=HBaseReadWrite.getInstance().getUserRoles(user1);
Assert.assertEquals(2,roles.size());
roleNames=roles.toArray(new String[roles.size()]);
Arrays.sort(roleNames);
Assert.assertArrayEquals(new String[]{roleName1,roleName2},roleNames);
roles=HBaseReadWrite.getInstance().getUserRoles(user2);
Assert.assertEquals(1,roles.size());
Assert.assertEquals(roleName1,roles.get(0));
}
BranchVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Exercises database-level privilege listing: grants "read" to a user and
// "write" to a role on one database, then verifies per-principal,
// per-database, and *All listing APIs, including empty results for
// unrelated principals and the other database.
@Test public void listDbGrants() throws Exception {
String dbNames[]=new String[]{"ldbg_db1","ldbg_db2"};
try {
// Two databases; only the first receives grants.
Database db=new Database(dbNames[0],"no description","file:///tmp",emptyParameters);
store.createDatabase(db);
db=new Database(dbNames[1],"no description","file:///tmp",emptyParameters);
store.createDatabase(db);
String[] roleNames=new String[]{"ldbg_role1","ldbg_role2"};
String[] userNames=new String[]{"frodo","sam"};
store.addRole(roleNames[0],"me");
store.addRole(roleNames[1],"me");
int now=(int)(System.currentTimeMillis() / 1000);
Role role1=store.getRole(roleNames[0]);
Role role2=store.getRole(roleNames[1]);
store.grantRole(role1,userNames[0],PrincipalType.USER,"bob",PrincipalType.USER,false);
store.grantRole(role1,roleNames[1],PrincipalType.ROLE,"admin",PrincipalType.ROLE,true);
store.grantRole(role2,userNames[1],PrincipalType.USER,"bob",PrincipalType.USER,false);
// Grant "read" to user0 and "write" (with grant option) to role1 on db0.
List privileges=new ArrayList();
HiveObjectRef hiveObjRef=new HiveObjectRef(HiveObjectType.DATABASE,dbNames[0],null,null,null);
PrivilegeGrantInfo grantInfo=new PrivilegeGrantInfo("read",now,"me",PrincipalType.USER,false);
HiveObjectPrivilege hop=new HiveObjectPrivilege(hiveObjRef,userNames[0],PrincipalType.USER,grantInfo);
privileges.add(hop);
grantInfo=new PrivilegeGrantInfo("write",now,"me",PrincipalType.USER,true);
hop=new HiveObjectPrivilege(hiveObjRef,roleNames[0],PrincipalType.ROLE,grantInfo);
privileges.add(hop);
PrivilegeBag pBag=new PrivilegeBag(privileges);
store.grantPrivileges(pBag);
// Per-principal listing for db0 returns exactly the matching grant.
List hops=store.listPrincipalDBGrants(roleNames[0],PrincipalType.ROLE,dbNames[0]);
Assert.assertEquals(1,hops.size());
Assert.assertEquals(PrincipalType.ROLE,hops.get(0).getPrincipalType());
Assert.assertEquals(HiveObjectType.DATABASE,hops.get(0).getHiveObject().getObjectType());
Assert.assertEquals("write",hops.get(0).getGrantInfo().getPrivilege());
hops=store.listPrincipalDBGrants(userNames[0],PrincipalType.USER,dbNames[0]);
Assert.assertEquals(1,hops.size());
Assert.assertEquals(PrincipalType.USER,hops.get(0).getPrincipalType());
Assert.assertEquals(HiveObjectType.DATABASE,hops.get(0).getHiveObject().getObjectType());
Assert.assertEquals("read",hops.get(0).getGrantInfo().getPrivilege());
// Ungranted principals and the second database come back empty.
hops=store.listPrincipalDBGrants(roleNames[1],PrincipalType.ROLE,dbNames[0]);
Assert.assertEquals(0,hops.size());
hops=store.listPrincipalDBGrants(userNames[1],PrincipalType.USER,dbNames[0]);
Assert.assertEquals(0,hops.size());
hops=store.listPrincipalDBGrants(roleNames[0],PrincipalType.ROLE,dbNames[1]);
Assert.assertEquals(0,hops.size());
hops=store.listPrincipalDBGrants(userNames[0],PrincipalType.USER,dbNames[1]);
Assert.assertEquals(0,hops.size());
// listDBGrantsAll returns both grants; order is unspecified, so scan.
hops=store.listDBGrantsAll(dbNames[0]);
Assert.assertEquals(2,hops.size());
boolean sawUser=false, sawRole=false;
for ( HiveObjectPrivilege h : hops) {
if (h.getPrincipalName().equals(userNames[0])) {
Assert.assertEquals(PrincipalType.USER,h.getPrincipalType());
Assert.assertEquals(HiveObjectType.DATABASE,h.getHiveObject().getObjectType());
Assert.assertEquals("read",h.getGrantInfo().getPrivilege());
sawUser=true;
}
else if (h.getPrincipalName().equals(roleNames[0])) {
Assert.assertEquals(PrincipalType.ROLE,h.getPrincipalType());
Assert.assertEquals(HiveObjectType.DATABASE,h.getHiveObject().getObjectType());
Assert.assertEquals("write",h.getGrantInfo().getPrivilege());
sawRole=true;
}
}
Assert.assertTrue(sawUser && sawRole);
// Per-principal listing across all databases.
hops=store.listPrincipalDBGrantsAll(roleNames[0],PrincipalType.ROLE);
Assert.assertEquals(1,hops.size());
Assert.assertEquals(PrincipalType.ROLE,hops.get(0).getPrincipalType());
Assert.assertEquals(HiveObjectType.DATABASE,hops.get(0).getHiveObject().getObjectType());
Assert.assertEquals("write",hops.get(0).getGrantInfo().getPrivilege());
hops=store.listPrincipalDBGrantsAll(userNames[0],PrincipalType.USER);
Assert.assertEquals(1,hops.size());
Assert.assertEquals(PrincipalType.USER,hops.get(0).getPrincipalType());
Assert.assertEquals(HiveObjectType.DATABASE,hops.get(0).getHiveObject().getObjectType());
Assert.assertEquals("read",hops.get(0).getGrantInfo().getPrivilege());
hops=store.listPrincipalDBGrantsAll(roleNames[1],PrincipalType.ROLE);
Assert.assertEquals(0,hops.size());
hops=store.listPrincipalDBGrantsAll(userNames[1],PrincipalType.USER);
Assert.assertEquals(0,hops.size());
}
finally {
// Clean up so other tests see a pristine store.
store.dropDatabase(dbNames[0]);
store.dropDatabase(dbNames[1]);
}
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Adds a single partition to a partitioned table and verifies every field
// of the partition read back from the store.
// Raw collection types replaced with generics; behavior unchanged.
@Test public void createPartition() throws Exception {
String dbName="default";
String tableName="myparttable";
int startTime=(int)(System.currentTimeMillis() / 1000);
List<FieldSchema> cols=new ArrayList<>();
cols.add(new FieldSchema("col1","int","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,emptyParameters);
List<FieldSchema> partCols=new ArrayList<>();
partCols.add(new FieldSchema("pc","string",""));
Table table=new Table(tableName,dbName,"me",startTime,startTime,0,sd,partCols,emptyParameters,null,null,null);
store.createTable(table);
List<String> vals=new ArrayList<>();
vals.add("fred");
// The partition clones the table's SD but points at its own location.
StorageDescriptor psd=new StorageDescriptor(sd);
psd.setLocation("file:/tmp/pc=fred");
Partition part=new Partition(vals,dbName,tableName,startTime,startTime,psd,emptyParameters);
store.addPartition(part);
Partition p=store.getPartition(dbName,tableName,vals);
Assert.assertEquals(1,p.getSd().getColsSize());
Assert.assertEquals("col1",p.getSd().getCols().get(0).getName());
Assert.assertEquals("int",p.getSd().getCols().get(0).getType());
Assert.assertEquals("nocomment",p.getSd().getCols().get(0).getComment());
Assert.assertEquals("serde",p.getSd().getSerdeInfo().getName());
Assert.assertEquals("seriallib",p.getSd().getSerdeInfo().getSerializationLib());
Assert.assertEquals("file:/tmp/pc=fred",p.getSd().getLocation());
Assert.assertEquals("input",p.getSd().getInputFormat());
Assert.assertEquals("output",p.getSd().getOutputFormat());
Assert.assertEquals(dbName,p.getDbName());
Assert.assertEquals(tableName,p.getTableName());
Assert.assertEquals(1,p.getValuesSize());
Assert.assertEquals("fred",p.getValues().get(0));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Creates a role and verifies its name, owner, and create time.
// FIX: the original computed "now" as "(int)System.currentTimeMillis() / 1000",
// where the cast binds tighter than the division — the long millis value was
// truncated to int (overflowing, possibly negative) BEFORE dividing, making
// the "now <= createTime" assertion meaningless. Corrected to
// "(int)(System.currentTimeMillis() / 1000)", matching the sibling tests.
@Test public void createRole() throws Exception {
int now=(int)(System.currentTimeMillis() / 1000);
String roleName="myrole";
store.addRole(roleName,"me");
Role r=store.getRole(roleName);
Assert.assertEquals(roleName,r.getRoleName());
Assert.assertEquals("me",r.getOwnerName());
// Create time is assigned by the store, so it can only be >= our snapshot.
Assert.assertTrue(now <= r.getCreateTime());
}
APIUtilityVerifier EqualityVerifier
// Creates three databases and verifies getAllDatabases returns all of them.
// Raw List replaced with List<String>; behavior unchanged.
@Test public void getAllDbs() throws Exception {
String[] dbNames=new String[3];
for (int i=0; i < dbNames.length; i++) {
dbNames[i]="db" + i;
Database db=new Database(dbNames[i],"no description","file:///tmp",emptyParameters);
store.createDatabase(db);
}
List<String> dbs=store.getAllDatabases();
Assert.assertEquals(3,dbs.size());
String[] namesFromStore=dbs.toArray(new String[3]);
Arrays.sort(namesFromStore);
Assert.assertArrayEquals(dbNames,namesFromStore);
}
APIUtilityVerifier BranchVerifier UtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Exercises the full role grant/revoke lifecycle: nested role membership,
// listRoles for users and roles, listRoleMembers, revoking a nested grant,
// and removing a role entirely. PUBLIC membership is implicit for users.
@Test public void grantRevokeRoles() throws Exception {
int now=(int)(System.currentTimeMillis() / 1000);
String roleName1="role1";
store.addRole(roleName1,"me");
String roleName2="role2";
store.addRole(roleName2,"me");
Role role1=store.getRole(roleName1);
Role role2=store.getRole(roleName2);
// fred -> role1, role1 -> role2 (with grant option), fred -> role2.
store.grantRole(role1,"fred",PrincipalType.USER,"bob",PrincipalType.USER,false);
store.grantRole(role2,roleName1,PrincipalType.ROLE,"admin",PrincipalType.ROLE,true);
store.grantRole(role2,"fred",PrincipalType.USER,"admin",PrincipalType.ROLE,false);
// fred should see role1, role2, and the implicit PUBLIC role.
List roles=store.listRoles("fred",PrincipalType.USER);
Assert.assertEquals(3,roles.size());
boolean sawRole1=false, sawRole2=false, sawPublic=false;
for ( Role role : roles) {
if (role.getRoleName().equals(roleName1)) {
sawRole1=true;
}
else if (role.getRoleName().equals(roleName2)) {
sawRole2=true;
}
else if (role.getRoleName().equals(HiveMetaStore.PUBLIC)) {
sawPublic=true;
}
else {
Assert.fail("Unknown role name " + role.getRoleName());
}
}
Assert.assertTrue(sawRole1 && sawRole2 && sawPublic);
// "fred" as a ROLE principal is a different principal — no roles.
roles=store.listRoles("fred",PrincipalType.ROLE);
Assert.assertEquals(0,roles.size());
// role1 is a member of role2 only.
roles=store.listRoles(roleName1,PrincipalType.ROLE);
Assert.assertEquals(1,roles.size());
Role role=roles.get(0);
Assert.assertEquals(roleName2,role.getRoleName());
// role1's sole member is fred, with the grant metadata recorded above.
List grants=store.listRoleMembers(roleName1);
Assert.assertEquals(1,grants.size());
Assert.assertEquals("fred",grants.get(0).getPrincipalName());
Assert.assertEquals(PrincipalType.USER,grants.get(0).getPrincipalType());
Assert.assertTrue("Expected grant time of " + now + " got "+ grants.get(0).getGrantTime(),grants.get(0).getGrantTime() >= now);
Assert.assertEquals("bob",grants.get(0).getGrantorName());
Assert.assertEquals(PrincipalType.USER,grants.get(0).getGrantorPrincipalType());
Assert.assertFalse(grants.get(0).isGrantOption());
// role2 has two members: fred and role1.
grants=store.listRoleMembers(roleName2);
Assert.assertEquals(2,grants.size());
boolean sawFred=false;
sawRole1=false;
for ( RolePrincipalGrant m : grants) {
if ("fred".equals(m.getPrincipalName())) sawFred=true;
else if (roleName1.equals(m.getPrincipalName())) sawRole1=true;
else Assert.fail("Unexpected principal " + m.getPrincipalName());
}
Assert.assertTrue(sawFred && sawRole1);
// Revoke only the grant option on role1's membership in role2.
store.revokeRole(role2,roleName1,PrincipalType.ROLE,true);
roles=store.listRoles(roleName1,PrincipalType.ROLE);
Assert.assertEquals(1,roles.size());
Assert.assertEquals(roleName2,roles.get(0).getRoleName());
grants=store.listRoleMembers(roleName1);
Assert.assertFalse(grants.get(0).isGrantOption());
// Remove role1 entirely; fred keeps role2 and PUBLIC.
store.removeRole(roleName1);
roles=store.listRoles("fred",PrincipalType.USER);
Assert.assertEquals(2,roles.size());
sawRole2=sawPublic=false;
for ( Role m : roles) {
if (m.getRoleName().equals(roleName2)) sawRole2=true;
else if (m.getRoleName().equals(HiveMetaStore.PUBLIC)) sawPublic=true;
else Assert.fail("Unknown role " + m.getRoleName());
}
Assert.assertTrue(sawRole2 && sawPublic);
roles=store.listRoles(roleName1,PrincipalType.ROLE);
Assert.assertEquals(0,roles.size());
// Finally revoke fred's direct role2 membership: only PUBLIC remains.
store.revokeRole(role2,"fred",PrincipalType.USER,false);
roles=store.listRoles("fred",PrincipalType.USER);
Assert.assertEquals(1,roles.size());
Assert.assertEquals(HiveMetaStore.PUBLIC,roles.get(0).getRoleName());
}
APIUtilityVerifier EqualityVerifier
// Creates a table with two partition columns, adds two partitions, and
// verifies listPartitionNames, getPartitionsByNames, and dropPartitions.
// FIX: the original called assertArrayEquals(actual, expected) with the
// arguments swapped at the name check — JUnit's contract is
// (expected, actual), so failure messages would be inverted. Also replaced
// raw collection types with generics.
@Test public void listPartitions() throws Exception {
String dbName="default";
String tableName="listParts";
int startTime=(int)(System.currentTimeMillis() / 1000);
List<FieldSchema> cols=new ArrayList<>();
cols.add(new FieldSchema("col1","int","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,emptyParameters);
List<FieldSchema> partCols=new ArrayList<>();
partCols.add(new FieldSchema("pc","string",""));
partCols.add(new FieldSchema("region","string",""));
Table table=new Table(tableName,dbName,"me",startTime,startTime,0,sd,partCols,emptyParameters,null,null,null);
store.createTable(table);
String[][] partVals=new String[][]{{"today","north america"},{"tomorrow","europe"}};
for ( String[] pv : partVals) {
List<String> vals=new ArrayList<>();
for ( String v : pv) vals.add(v);
StorageDescriptor psd=new StorageDescriptor(sd);
psd.setLocation("file:/tmp/pc=" + pv[0] + "/region="+ pv[1]);
Partition part=new Partition(vals,dbName,tableName,startTime,startTime,psd,emptyParameters);
store.addPartition(part);
}
List<String> names=store.listPartitionNames(dbName,tableName,(short)-1);
Assert.assertEquals(2,names.size());
String[] resultNames=names.toArray(new String[names.size()]);
Arrays.sort(resultNames);
Assert.assertArrayEquals(new String[]{"pc=today/region=north america","pc=tomorrow/region=europe"},resultNames);
List<Partition> parts=store.getPartitionsByNames(dbName,tableName,names);
Assert.assertArrayEquals(partVals[0],parts.get(0).getValues().toArray(new String[2]));
Assert.assertArrayEquals(partVals[1],parts.get(1).getValues().toArray(new String[2]));
// Dropping by name leaves the table with no partitions.
store.dropPartitions(dbName,tableName,names);
List<Partition> afterDropParts=store.getPartitions(dbName,tableName,-1);
Assert.assertEquals(0,afterDropParts.size());
}
EqualityVerifier
/**
 * Exercises the master-key lifecycle: add, list, update, remove, and the
 * NoSuchObjectException raised when updating a key id that was never issued.
 */
@Test public void masterKey() throws Exception {
// Key ids are handed out sequentially starting at 0.
Assert.assertEquals(0,store.addMasterKey("k1"));
Assert.assertEquals(1,store.addMasterKey("k2"));
String[] storedKeys=store.getMasterKeys();
Arrays.sort(storedKeys);
Assert.assertArrayEquals(new String[]{"k1","k2"},storedKeys);
// Replacing key 0 swaps "k1" out for "k3".
store.updateMasterKey(0,"k3");
storedKeys=store.getMasterKeys();
Arrays.sort(storedKeys);
Assert.assertArrayEquals(new String[]{"k2","k3"},storedKeys);
// Removing key 1 leaves only "k3".
store.removeMasterKey(1);
storedKeys=store.getMasterKeys();
Assert.assertArrayEquals(new String[]{"k3"},storedKeys);
// Updating an id that does not exist must fail.
thrown.expect(NoSuchObjectException.class);
store.updateMasterKey(72,"whatever");
}
Class: org.apache.hadoop.hive.metastore.hbase.TestSharedStorageDescriptor BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Mutating a sort column through the shared descriptor must trigger a
 * copy-on-write: the wrapper sees the new order, the shared one keeps 1.
 */
@Test public void changeOrder(){
StorageDescriptor backing=new StorageDescriptor();
backing.addToSortCols(new Order("fred",1));
SharedStorageDescriptor wrapper=new SharedStorageDescriptor();
wrapper.setShared(backing);
wrapper.getSortCols().get(0).setOrder(2);
// The wrapper should now own a private copy of the sort-column list.
Assert.assertFalse(backing.getSortCols() == wrapper.getSortCols());
Assert.assertEquals(2,wrapper.getSortCols().get(0).getOrder());
Assert.assertEquals(1,backing.getSortCols().get(0).getOrder());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Iterating the columns of a shared descriptor must return the column data
 * while forcing a defensive copy of the underlying column list.
 */
@Test public void colIterator(){
StorageDescriptor sd=new StorageDescriptor();
sd.addToCols(new FieldSchema("fred","string",""));
SharedStorageDescriptor ssd=new SharedStorageDescriptor();
ssd.setShared(sd);
// Typed iterator: a raw Iterator would return Object from next() and the
// getName() call below would not compile.
Iterator<FieldSchema> iter=ssd.getColsIterator();
Assert.assertTrue(iter.hasNext());
Assert.assertEquals("fred",iter.next().getName());
Assert.assertFalse(sd.getCols() == ssd.getCols());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Adding a bucket column via the list returned by the shared descriptor must
 * copy-on-write: the wrapper has two columns, the shared descriptor one.
 */
@Test public void changeBucketList(){
StorageDescriptor sd=new StorageDescriptor();
sd.addToBucketCols("fred");  // new String("fred") was redundant
SharedStorageDescriptor ssd=new SharedStorageDescriptor();
ssd.setShared(sd);
List<String> list=ssd.getBucketCols();
list.add("bob");
Assert.assertFalse(sd.getBucketCols() == ssd.getBucketCols());
Assert.assertEquals(2,ssd.getBucketColsSize());
Assert.assertEquals(1,sd.getBucketColsSize());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * addToCols on the shared descriptor must not leak the new column back into
 * the underlying shared StorageDescriptor.
 */
@Test public void addToColList(){
StorageDescriptor original=new StorageDescriptor();
original.addToCols(new FieldSchema("fred","string",""));
SharedStorageDescriptor view=new SharedStorageDescriptor();
view.setShared(original);
view.addToCols(new FieldSchema("joe","int",""));
// The view copied the column list before appending; the original is intact.
Assert.assertFalse(original.getCols() == view.getCols());
Assert.assertEquals(2,view.getColsSize());
Assert.assertEquals(1,original.getColsSize());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Renaming the serde through the shared descriptor must clone the SerDeInfo
 * rather than mutate the instance held by the shared descriptor.
 */
@Test public void changeOnSerde(){
StorageDescriptor backing=new StorageDescriptor();
SerDeInfo serdeInfo=new SerDeInfo();
serdeInfo.setName("serde");
backing.setSerdeInfo(serdeInfo);
SharedStorageDescriptor shared=new SharedStorageDescriptor();
shared.setShared(backing);
shared.getSerdeInfo().setName("different");
// Copy-on-write: the two descriptors now hold distinct SerDeInfo objects.
Assert.assertFalse(backing.getSerdeInfo() == shared.getSerdeInfo());
Assert.assertEquals("serde",serdeInfo.getName());
Assert.assertEquals("different",shared.getSerdeInfo().getName());
Assert.assertEquals("serde",backing.getSerdeInfo().getName());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Unsetting sort columns on the shared descriptor clears only the wrapper's
 * copy; the shared descriptor keeps its original sort column.
 */
@Test public void unsetOrder(){
StorageDescriptor backing=new StorageDescriptor();
backing.addToSortCols(new Order("fred",1));
SharedStorageDescriptor shared=new SharedStorageDescriptor();
shared.setShared(backing);
shared.unsetSortCols();
// Unset detached the wrapper's list from the shared descriptor's list.
Assert.assertFalse(backing.getSortCols() == shared.getSortCols());
Assert.assertEquals(0,shared.getSortColsSize());
Assert.assertEquals(1,backing.getSortColsSize());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * After setReadOnly, reads must not trigger copy-on-write: getCols should
 * hand back the shared list instance itself.
 */
@Test public void setReadOnly(){
StorageDescriptor sd=new StorageDescriptor();
sd.addToCols(new FieldSchema("fred","string",""));
SharedStorageDescriptor ssd=new SharedStorageDescriptor();
ssd.setShared(sd);
ssd.setReadOnly();
List<FieldSchema> cols=ssd.getCols();
Assert.assertEquals(1,cols.size());
// Read-only access keeps sharing the exact same list instance.
Assert.assertTrue(sd.getCols() == ssd.getCols());
}
Class: org.apache.hadoop.hive.metastore.hbase.TestStorageDescriptorSharing APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test public void createManyPartitions() throws Exception {
String dbName="default";
String tableName="manyParts";
int startTime=(int)(System.currentTimeMillis() / 1000);
List cols=new ArrayList();
cols.add(new FieldSchema("col1","int","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,emptyParameters);
List partCols=new ArrayList();
partCols.add(new FieldSchema("pc","string",""));
Table table=new Table(tableName,dbName,"me",startTime,startTime,0,sd,partCols,emptyParameters,null,null,null);
store.createTable(table);
List partVals=Arrays.asList("alan","bob","carl","doug","ethan");
for ( String val : partVals) {
List vals=new ArrayList();
vals.add(val);
StorageDescriptor psd=new StorageDescriptor(sd);
psd.setLocation("file:/tmp/pc=" + val);
Partition part=new Partition(vals,dbName,tableName,startTime,startTime,psd,emptyParameters);
store.addPartition(part);
Partition p=store.getPartition(dbName,tableName,vals);
Assert.assertEquals("file:/tmp/pc=" + val,p.getSd().getLocation());
}
Assert.assertEquals(1,HBaseReadWrite.getInstance().countStorageDescriptor());
String tableName2="differentTable";
sd=new StorageDescriptor(cols,"file:/tmp","input2","output",false,0,serde,null,null,emptyParameters);
table=new Table(tableName2,"default","me",startTime,startTime,0,sd,null,emptyParameters,null,null,null);
store.createTable(table);
Assert.assertEquals(2,HBaseReadWrite.getInstance().countStorageDescriptor());
store.dropPartition(dbName,tableName,Arrays.asList(partVals.get(0)));
Assert.assertEquals(2,HBaseReadWrite.getInstance().countStorageDescriptor());
table=store.getTable(dbName,tableName2);
byte[] sdHash=HBaseUtils.hashStorageDescriptor(table.getSd(),md);
table.setLastAccessTime(startTime + 1);
store.alterTable(dbName,tableName2,table);
Assert.assertEquals(2,HBaseReadWrite.getInstance().countStorageDescriptor());
table=store.getTable(dbName,tableName2);
byte[] alteredHash=HBaseUtils.hashStorageDescriptor(table.getSd(),md);
Assert.assertArrayEquals(sdHash,alteredHash);
table.getSd().setOutputFormat("output_changed");
store.alterTable(dbName,tableName2,table);
Assert.assertEquals(2,HBaseReadWrite.getInstance().countStorageDescriptor());
table=store.getTable(dbName,tableName2);
alteredHash=HBaseUtils.hashStorageDescriptor(table.getSd(),md);
Assert.assertFalse(Arrays.equals(sdHash,alteredHash));
Partition part=store.getPartition(dbName,tableName,Arrays.asList(partVals.get(1)));
sdHash=HBaseUtils.hashStorageDescriptor(part.getSd(),md);
part.setLastAccessTime(part.getLastAccessTime() + 1);
store.alterPartition(dbName,tableName,Arrays.asList(partVals.get(1)),part);
Assert.assertEquals(2,HBaseReadWrite.getInstance().countStorageDescriptor());
part=store.getPartition(dbName,tableName,Arrays.asList(partVals.get(1)));
alteredHash=HBaseUtils.hashStorageDescriptor(part.getSd(),md);
Assert.assertArrayEquals(sdHash,alteredHash);
part.getSd().setOutputFormat("output_changed_some_more");
store.alterPartition(dbName,tableName,Arrays.asList(partVals.get(1)),part);
Assert.assertEquals(3,HBaseReadWrite.getInstance().countStorageDescriptor());
part=store.getPartition(dbName,tableName,Arrays.asList(partVals.get(1)));
alteredHash=HBaseUtils.hashStorageDescriptor(part.getSd(),md);
Assert.assertFalse(Arrays.equals(sdHash,alteredHash));
List parts=store.getPartitions(dbName,tableName,-1);
sdHash=HBaseUtils.hashStorageDescriptor(parts.get(1).getSd(),md);
for (int i=1; i < 3; i++) {
parts.get(i).setLastAccessTime(97);
}
List> listPartVals=new ArrayList>();
for ( String pv : partVals.subList(1,partVals.size())) {
listPartVals.add(Arrays.asList(pv));
}
store.alterPartitions(dbName,tableName,listPartVals,parts);
Assert.assertEquals(3,HBaseReadWrite.getInstance().countStorageDescriptor());
parts=store.getPartitions(dbName,tableName,-1);
alteredHash=HBaseUtils.hashStorageDescriptor(parts.get(1).getSd(),md);
Assert.assertArrayEquals(sdHash,alteredHash);
parts=store.getPartitions(dbName,tableName,-1);
sdHash=HBaseUtils.hashStorageDescriptor(parts.get(1).getSd(),md);
for (int i=1; i < 3; i++) {
parts.get(i).getSd().setOutputFormat("yet_a_different_of");
}
store.alterPartitions(dbName,tableName,listPartVals,parts);
Assert.assertEquals(4,HBaseReadWrite.getInstance().countStorageDescriptor());
parts=store.getPartitions(dbName,tableName,-1);
alteredHash=HBaseUtils.hashStorageDescriptor(parts.get(1).getSd(),md);
Assert.assertFalse(Arrays.equals(sdHash,alteredHash));
for ( String partVal : partVals.subList(1,partVals.size())) {
store.dropPartition(dbName,tableName,Arrays.asList(partVal));
}
store.dropTable(dbName,tableName);
store.dropTable(dbName,tableName2);
Assert.assertEquals(0,HBaseReadWrite.getInstance().countStorageDescriptor());
}
Class: org.apache.hadoop.hive.metastore.txn.TestCompactionTxnHandler APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * After markCompacted a compaction is no longer handed to workers and shows
 * up as "ready for cleaning" with no worker assigned.
 */
@Test public void testMarkCompacted() throws Exception {
CompactionRequest rqst=new CompactionRequest("foo","bar",CompactionType.MINOR);
rqst.setPartitionname("ds=today");
txnHandler.compact(rqst);
CompactionInfo ci=txnHandler.findNextToCompact("fred");
assertNotNull(ci);
txnHandler.markCompacted(ci);
assertNull(txnHandler.findNextToCompact("fred"));
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List<ShowCompactResponseElement> compacts=rsp.getCompacts();
assertEquals(1,compacts.size());
ShowCompactResponseElement c=compacts.get(0);
assertEquals("foo",c.getDbname());
assertEquals("bar",c.getTablename());
assertEquals("ds=today",c.getPartitionname());
assertEquals(CompactionType.MINOR,c.getType());
assertEquals("ready for cleaning",c.getState());
// markCompacted clears the worker assignment.
assertNull(c.getWorkerid());
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Verifies that markCleaned removes the txn components for the cleaned
 * table/partition and that cleanEmptyAbortedTxns removes an aborted txn only
 * once it has no remaining components.
 */
@Test public void testMarkCleanedCleansTxnsAndTxnComponents() throws Exception {
// Aborted txn #1: holds a component on mydb.mytable.
long txnid=openTxn();
LockComponent comp=new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb");
comp.setTablename("mytable");
List components=new ArrayList(1);
components.add(comp);
LockRequest req=new LockRequest(components,"me","localhost");
req.setTxnid(txnid);
LockResponse res=txnHandler.lock(req);
assertTrue(res.getState() == LockState.ACQUIRED);
txnHandler.abortTxn(new AbortTxnRequest(txnid));
// Aborted txn #2: holds a component on mydb.yourtable.
txnid=openTxn();
comp=new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb");
comp.setTablename("yourtable");
components=new ArrayList(1);
components.add(comp);
req=new LockRequest(components,"me","localhost");
req.setTxnid(txnid);
res=txnHandler.lock(req);
assertTrue(res.getState() == LockState.ACQUIRED);
txnHandler.abortTxn(new AbortTxnRequest(txnid));
// Aborted txn #3: holds components on two partitions of mydb.foo.
txnid=openTxn();
comp=new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb");
comp.setTablename("foo");
comp.setPartitionname("bar");
components=new ArrayList(1);
components.add(comp);
req=new LockRequest(components,"me","localhost");
req.setTxnid(txnid);
res=txnHandler.lock(req);
assertTrue(res.getState() == LockState.ACQUIRED);
comp=new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb");
comp.setTablename("foo");
comp.setPartitionname("baz");
components=new ArrayList(1);
components.add(comp);
req=new LockRequest(components,"me","localhost");
req.setTxnid(txnid);
res=txnHandler.lock(req);
assertTrue(res.getState() == LockState.ACQUIRED);
txnHandler.abortTxn(new AbortTxnRequest(txnid));
// Run a full compaction cycle on mydb.mytable (txn #1's only component).
CompactionInfo ci=new CompactionInfo();
CompactionRequest rqst=new CompactionRequest("mydb","mytable",CompactionType.MAJOR);
txnHandler.compact(rqst);
assertEquals(0,txnHandler.findReadyToClean().size());
ci=txnHandler.findNextToCompact("fred");
assertNotNull(ci);
txnHandler.markCompacted(ci);
List toClean=txnHandler.findReadyToClean();
assertEquals(1,toClean.size());
txnHandler.markCleaned(ci);
GetOpenTxnsResponse txnList=txnHandler.getOpenTxns();
// All three aborted txns are still reported before cleanup.
assertEquals(3,txnList.getOpen_txnsSize());
// Txn #1 lost its only component to markCleaned, so it gets removed here.
txnHandler.cleanEmptyAbortedTxns();
txnList=txnHandler.getOpenTxns();
assertEquals(2,txnList.getOpen_txnsSize());
// Compact and clean mydb.foo partition bar (one of txn #3's components).
rqst=new CompactionRequest("mydb","foo",CompactionType.MAJOR);
rqst.setPartitionname("bar");
txnHandler.compact(rqst);
assertEquals(0,txnHandler.findReadyToClean().size());
ci=txnHandler.findNextToCompact("fred");
assertNotNull(ci);
txnHandler.markCompacted(ci);
toClean=txnHandler.findReadyToClean();
assertEquals(1,toClean.size());
txnHandler.markCleaned(ci);
txnHandler.openTxns(new OpenTxnRequest(1,"me","localhost"));
// Txn #3 still holds the foo/baz component, so it must survive cleanup:
// 2 aborted txns + 1 newly opened txn = 3.
txnHandler.cleanEmptyAbortedTxns();
txnList=txnHandler.getOpenTxns();
assertEquals(3,txnList.getOpen_txnsSize());
}
BranchVerifier UtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * revokeFromLocalWorkers("fred") must return every compaction held by a
 * worker whose id starts with "fred" to the initiated state, leaving the
 * compaction held by "bob-193892" working.
 */
@Test public void testRevokeFromLocalWorkers() throws Exception {
CompactionRequest rqst=new CompactionRequest("foo","bar",CompactionType.MINOR);
txnHandler.compact(rqst);
rqst=new CompactionRequest("foo","baz",CompactionType.MINOR);
txnHandler.compact(rqst);
rqst=new CompactionRequest("foo","bazzoo",CompactionType.MINOR);
txnHandler.compact(rqst);
assertNotNull(txnHandler.findNextToCompact("fred-193892"));
assertNotNull(txnHandler.findNextToCompact("bob-193892"));
assertNotNull(txnHandler.findNextToCompact("fred-193893"));
txnHandler.revokeFromLocalWorkers("fred");
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List<ShowCompactResponseElement> compacts=rsp.getCompacts();
assertEquals(3,compacts.size());
boolean sawWorkingBob=false;
int initiatedCount=0;
for ( ShowCompactResponseElement c : compacts) {
if (c.getState().equals("working")) {
// Only bob's compaction should still be running.
assertEquals("bob-193892",c.getWorkerid());
sawWorkingBob=true;
}
else if (c.getState().equals("initiated")) {
initiatedCount++;
}
else {
fail("Unexpected state");
}
}
assertTrue(sawWorkingBob);
assertEquals(2,initiatedCount);
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Covers the compaction lifecycle for one partition: initiated -> working ->
 * ready for cleaning -> succeeded, checking the worker and cleaner queues at
 * each transition.
 */
@Test public void testMarkCleaned() throws Exception {
CompactionRequest rqst=new CompactionRequest("foo","bar",CompactionType.MINOR);
rqst.setPartitionname("ds=today");
txnHandler.compact(rqst);
assertEquals(0,txnHandler.findReadyToClean().size());
CompactionInfo ci=txnHandler.findNextToCompact("fred");
assertNotNull(ci);
// Being picked up by a worker does not make it ready to clean yet.
assertEquals(0,txnHandler.findReadyToClean().size());
txnHandler.markCompacted(ci);
assertNull(txnHandler.findNextToCompact("fred"));
List<CompactionInfo> toClean=txnHandler.findReadyToClean();
assertEquals(1,toClean.size());
assertNull(txnHandler.findNextToCompact("fred"));
txnHandler.markCleaned(ci);
// Fully cleaned: nothing left for workers or the cleaner.
assertNull(txnHandler.findNextToCompact("fred"));
assertEquals(0,txnHandler.findReadyToClean().size());
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
assertEquals(1,rsp.getCompactsSize());
assertTrue(TxnHandler.SUCCEEDED_RESPONSE.equals(rsp.getCompacts().get(0).getState()));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * A compaction becomes visible to findReadyToClean only after markCompacted,
 * and showCompact then reports it as "ready for cleaning" with no worker.
 */
@Test public void testFindNextToClean() throws Exception {
CompactionRequest rqst=new CompactionRequest("foo","bar",CompactionType.MINOR);
rqst.setPartitionname("ds=today");
txnHandler.compact(rqst);
assertEquals(0,txnHandler.findReadyToClean().size());
CompactionInfo ci=txnHandler.findNextToCompact("fred");
assertNotNull(ci);
assertEquals(0,txnHandler.findReadyToClean().size());
txnHandler.markCompacted(ci);
assertNull(txnHandler.findNextToCompact("fred"));
List<CompactionInfo> toClean=txnHandler.findReadyToClean();
assertEquals(1,toClean.size());
assertNull(txnHandler.findNextToCompact("fred"));
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List<ShowCompactResponseElement> compacts=rsp.getCompacts();
assertEquals(1,compacts.size());
ShowCompactResponseElement c=compacts.get(0);
assertEquals("foo",c.getDbname());
assertEquals("bar",c.getTablename());
assertEquals("ds=today",c.getPartitionname());
assertEquals(CompactionType.MINOR,c.getType());
assertEquals("ready for cleaning",c.getState());
assertNull(c.getWorkerid());
}
BooleanVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
/**
 * Committing a txn that wrote to a table and to a partition must surface
 * both as potential compactions: one table-level (null partition) and one
 * partition-level.
 */
@Test public void testFindPotentialCompactions() throws Exception {
long txnid=openTxn();
LockComponent comp=new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb");
comp.setTablename("mytable");
List<LockComponent> components=new ArrayList<>(1);
components.add(comp);
comp=new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb");
comp.setTablename("yourtable");
comp.setPartitionname("mypartition");
components.add(comp);
LockRequest req=new LockRequest(components,"me","localhost");
req.setTxnid(txnid);
LockResponse res=txnHandler.lock(req);
assertTrue(res.getState() == LockState.ACQUIRED);
txnHandler.commitTxn(new CommitTxnRequest(txnid));
// Commit releases the locks.
assertEquals(0,txnHandler.numLocksInLockTable());
Set<CompactionInfo> potentials=txnHandler.findPotentialCompactions(100);
assertEquals(2,potentials.size());
boolean sawMyTable=false, sawYourTable=false;
for ( CompactionInfo ci : potentials) {
sawMyTable|=(ci.dbname.equals("mydb") && ci.tableName.equals("mytable") && ci.partName == null);
sawYourTable|=(ci.dbname.equals("mydb") && ci.tableName.equals("yourtable") && ci.partName.equals("mypartition"));
}
assertTrue(sawMyTable);
assertTrue(sawYourTable);
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * findNextToCompact hands the queued request to exactly one worker, records
 * the worker id and a start time, and setRunAs updates the run-as user.
 */
@Test public void testFindNextToCompact() throws Exception {
CompactionRequest rqst=new CompactionRequest("foo","bar",CompactionType.MINOR);
rqst.setPartitionname("ds=today");
txnHandler.compact(rqst);
long now=System.currentTimeMillis();
CompactionInfo ci=txnHandler.findNextToCompact("fred");
assertNotNull(ci);
assertEquals("foo",ci.dbname);
assertEquals("bar",ci.tableName);
assertEquals("ds=today",ci.partName);
assertEquals(CompactionType.MINOR,ci.type);
assertNull(ci.runAs);
// The only queued compaction was taken; a second call finds nothing.
assertNull(txnHandler.findNextToCompact("fred"));
txnHandler.setRunAs(ci.id,"bob");
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List<ShowCompactResponseElement> compacts=rsp.getCompacts();
assertEquals(1,compacts.size());
ShowCompactResponseElement c=compacts.get(0);
assertEquals("foo",c.getDbname());
assertEquals("bar",c.getTablename());
assertEquals("ds=today",c.getPartitionname());
assertEquals(CompactionType.MINOR,c.getType());
assertEquals("working",c.getState());
// Start time should be within 5 seconds of when we sampled "now".
assertTrue(c.getStart() - 5000 < now && c.getStart() + 5000 > now);
assertEquals("fred",c.getWorkerid());
assertEquals("bob",c.getRunAs());
}
BranchVerifier UtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * revokeTimedoutWorkers(100) must return compactions whose worker has not
 * checked in within 100 ms to the initiated state, while the freshly
 * assigned one stays working.
 */
@Test public void testRevokeTimedOutWorkers() throws Exception {
CompactionRequest rqst=new CompactionRequest("foo","bar",CompactionType.MINOR);
txnHandler.compact(rqst);
rqst=new CompactionRequest("foo","baz",CompactionType.MINOR);
txnHandler.compact(rqst);
assertNotNull(txnHandler.findNextToCompact("fred-193892"));
// Let the first assignment age past the 100 ms timeout used below.
Thread.sleep(200);
assertNotNull(txnHandler.findNextToCompact("fred-193892"));
txnHandler.revokeTimedoutWorkers(100);
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List<ShowCompactResponseElement> compacts=rsp.getCompacts();
assertEquals(2,compacts.size());
boolean sawWorking=false, sawInitiated=false;
for ( ShowCompactResponseElement c : compacts) {
if (c.getState().equals("working")) sawWorking=true;
else if (c.getState().equals("initiated")) sawInitiated=true;
else fail("Unexpected state");
}
assertTrue(sawWorking);
assertTrue(sawInitiated);
}
BranchVerifier UtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * With two queued compactions on the same table, two findNextToCompact calls
 * must hand out both partitions (in either order) and a third call must find
 * nothing; both end up working for worker "fred".
 */
@Test public void testFindNextToCompact2() throws Exception {
CompactionRequest rqst=new CompactionRequest("foo","bar",CompactionType.MINOR);
rqst.setPartitionname("ds=today");
txnHandler.compact(rqst);
rqst=new CompactionRequest("foo","bar",CompactionType.MINOR);
rqst.setPartitionname("ds=yesterday");
txnHandler.compact(rqst);
long now=System.currentTimeMillis();
boolean expectToday=false;
CompactionInfo ci=txnHandler.findNextToCompact("fred");
assertNotNull(ci);
assertEquals("foo",ci.dbname);
assertEquals("bar",ci.tableName);
// Whichever partition came first, the other must come second.
if ("ds=today".equals(ci.partName)) expectToday=false;
else if ("ds=yesterday".equals(ci.partName)) expectToday=true;
else fail("partition name should have been today or yesterday but was " + ci.partName);
assertEquals(CompactionType.MINOR,ci.type);
ci=txnHandler.findNextToCompact("fred");
assertNotNull(ci);
assertEquals("foo",ci.dbname);
assertEquals("bar",ci.tableName);
if (expectToday) assertEquals("ds=today",ci.partName);
else assertEquals("ds=yesterday",ci.partName);
assertEquals(CompactionType.MINOR,ci.type);
assertNull(txnHandler.findNextToCompact("fred"));
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List<ShowCompactResponseElement> compacts=rsp.getCompacts();
assertEquals(2,compacts.size());
for ( ShowCompactResponseElement e : compacts) {
assertEquals("working",e.getState());
assertTrue(e.getStart() - 5000 < now && e.getStart() + 5000 > now);
assertEquals("fred",e.getWorkerid());
}
}
APIUtilityVerifier BranchVerifier InternalCallVerifier EqualityVerifier
/**
 * Dynamic partitions registered during a txn must surface as potential
 * compactions (one per partition) after the txn commits.
 */
@Test public void addDynamicPartitions() throws Exception {
String dbName="default";
String tableName="adp_table";
OpenTxnsResponse openTxns=txnHandler.openTxns(new OpenTxnRequest(1,"me","localhost"));
long txnId=openTxns.getTxn_ids().get(0);
LockComponent lc=new LockComponent(LockType.SHARED_WRITE,LockLevel.TABLE,dbName);
lc.setTablename(tableName);
LockRequest lr=new LockRequest(Arrays.asList(lc),"me","localhost");
lr.setTxnid(txnId);
// NOTE(review): the call below builds a fresh LockRequest without the txn
// id instead of passing lr - looks unintentional, but preserved as-is.
LockResponse lock=txnHandler.lock(new LockRequest(Arrays.asList(lc),"me","localhost"));
assertEquals(LockState.ACQUIRED,lock.getState());
txnHandler.addDynamicPartitions(new AddDynamicPartitions(txnId,dbName,tableName,Arrays.asList("ds=yesterday","ds=today")));
txnHandler.commitTxn(new CommitTxnRequest(txnId));
Set<CompactionInfo> potentials=txnHandler.findPotentialCompactions(1000);
assertEquals(2,potentials.size());
// Sort for a deterministic iteration order: "ds=today" precedes
// "ds=yesterday" in the CompactionInfo ordering used by TreeSet.
SortedSet<CompactionInfo> sorted=new TreeSet<>(potentials);
int i=0;
for ( CompactionInfo ci : sorted) {
assertEquals(dbName,ci.dbname);
assertEquals(tableName,ci.tableName);
switch (i++) {
case 0:
assertEquals("ds=today",ci.partName);
break;
case 1:
assertEquals("ds=yesterday",ci.partName);
break;
default :
throw new RuntimeException("What?");
}
}
}
Class: org.apache.hadoop.hive.metastore.txn.TestTxnHandler InternalCallVerifier EqualityVerifier
/**
 * Heartbeating a range that covers three open transactions reports neither
 * aborted nor unknown txns.
 */
@Test public void heartbeatTxnRange() throws Exception {
long firstTxn=openTxn();
assertEquals(1,firstTxn);
openTxn();
openTxn();
HeartbeatTxnRangeResponse response=txnHandler.heartbeatTxnRange(new HeartbeatTxnRangeRequest(1,3));
assertEquals(0,response.getAborted().size());
assertEquals(0,response.getNosuch().size());
}
IterativeVerifier BranchVerifier UtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Creates three locks (txn exclusive DB lock, txn shared-read table lock,
 * and a no-txn shared-write partition lock) and verifies every field that
 * showLocks reports for each of them.
 */
@Test public void showLocks() throws Exception {
long beginning=System.currentTimeMillis();
long txnid=openTxn();
// Lock 1: exclusive DB lock inside txn 1.
LockComponent comp=new LockComponent(LockType.EXCLUSIVE,LockLevel.DB,"mydb");
List<LockComponent> components=new ArrayList<>(1);
components.add(comp);
LockRequest req=new LockRequest(components,"me","localhost");
req.setTxnid(txnid);
LockResponse res=txnHandler.lock(req);
// Lock 2: shared-read table lock inside txn 2 (waits behind lock 1).
txnid=openTxn();
comp=new LockComponent(LockType.SHARED_READ,LockLevel.TABLE,"mydb");
comp.setTablename("mytable");
components=new ArrayList<>(1);
components.add(comp);
req=new LockRequest(components,"me","localhost");
req.setTxnid(txnid);
res=txnHandler.lock(req);
// Lock 3: shared-write partition lock with no transaction.
components=new ArrayList<>(1);
comp=new LockComponent(LockType.SHARED_WRITE,LockLevel.PARTITION,"yourdb");
comp.setTablename("yourtable");
comp.setPartitionname("yourpartition");
components.add(comp);
req=new LockRequest(components,"you","remotehost");
res=txnHandler.lock(req);
ShowLocksResponse rsp=txnHandler.showLocks(new ShowLocksRequest());
List<ShowLocksResponseElement> locks=rsp.getLocks();
assertEquals(3,locks.size());
boolean[] saw=new boolean[locks.size()];
for (int i=0; i < saw.length; i++) saw[i]=false;
for ( ShowLocksResponseElement lock : locks) {
if (lock.getLockid() == 1) {
assertEquals(1,lock.getTxnid());
assertEquals("mydb",lock.getDbname());
assertNull(lock.getTablename());
assertNull(lock.getPartname());
assertEquals(LockState.ACQUIRED,lock.getState());
assertEquals(LockType.EXCLUSIVE,lock.getType());
// Txn locks heartbeat through the txn, so the lock heartbeat stays 0.
assertTrue(lock.toString(),0 == lock.getLastheartbeat() && lock.getTxnid() != 0);
assertTrue("Expected acquired at " + lock.getAcquiredat() + " to be between "+ beginning+ " and "+ System.currentTimeMillis(),beginning <= lock.getAcquiredat() && System.currentTimeMillis() >= lock.getAcquiredat());
assertEquals("me",lock.getUser());
assertEquals("localhost",lock.getHostname());
saw[0]=true;
}
else if (lock.getLockid() == 2) {
assertEquals(2,lock.getTxnid());
assertEquals("mydb",lock.getDbname());
assertEquals("mytable",lock.getTablename());
assertNull(lock.getPartname());
assertEquals(LockState.WAITING,lock.getState());
assertEquals(LockType.SHARED_READ,lock.getType());
assertTrue(lock.toString(),0 == lock.getLastheartbeat() && lock.getTxnid() != 0);
// A waiting lock has no acquisition time yet.
assertEquals(0,lock.getAcquiredat());
assertEquals("me",lock.getUser());
assertEquals("localhost",lock.getHostname());
saw[1]=true;
}
else if (lock.getLockid() == 3) {
assertEquals(0,lock.getTxnid());
assertEquals("yourdb",lock.getDbname());
assertEquals("yourtable",lock.getTablename());
assertEquals("yourpartition",lock.getPartname());
assertEquals(LockState.ACQUIRED,lock.getState());
assertEquals(LockType.SHARED_WRITE,lock.getType());
// Txn-less locks carry their own heartbeat timestamp.
assertTrue(lock.toString(),beginning <= lock.getLastheartbeat() && System.currentTimeMillis() >= lock.getLastheartbeat());
assertTrue(beginning <= lock.getAcquiredat() && System.currentTimeMillis() >= lock.getAcquiredat());
assertEquals("you",lock.getUser());
assertEquals("remotehost",lock.getHostname());
saw[2]=true;
}
else {
fail("Unknown lock id");
}
}
for (int i=0; i < saw.length; i++) assertTrue("Didn't see lock id " + i,saw[i]);
}
EqualityVerifier
/**
 * Opens 503 transactions with a 1 ms timeout, forces timeout processing,
 * and verifies every one of them ends up aborted.
 */
@Test public void testRecoverManyTimeouts() throws Exception {
long previousTimeout=txnHandler.setTimeout(1);
try {
txnHandler.openTxns(new OpenTxnRequest(503,"me","localhost"));
Thread.sleep(10);
txnHandler.performTimeOuts();
GetOpenTxnsInfoResponse rsp=txnHandler.getOpenTxnsInfo();
int abortedCount=0;
for ( TxnInfo info : rsp.getOpen_txns()) {
assertEquals(TxnState.ABORTED,info.getState());
abortedCount++;
}
assertEquals(503,abortedCount);
}
finally {
// Always restore the original timeout so later tests are unaffected.
txnHandler.setTimeout(previousTimeout);
}
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * With no transactions ever opened, both txn info calls report a zero high
 * water mark and empty open-transaction lists.
 */
@Test public void testValidTxnsEmpty() throws Exception {
GetOpenTxnsInfoResponse infoResponse=txnHandler.getOpenTxnsInfo();
assertEquals(0L,infoResponse.getTxn_high_water_mark());
assertTrue(infoResponse.getOpen_txns().isEmpty());
GetOpenTxnsResponse openResponse=txnHandler.getOpenTxns();
assertEquals(0L,openResponse.getTxn_high_water_mark());
assertTrue(openResponse.getOpen_txns().isEmpty());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Locks taken inside a transaction must be released when the txn commits.
 */
@Test public void testUnlockOnCommit() throws Exception {
long txnid=openTxn();
LockComponent comp=new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb");
comp.setTablename("mytable");
List<LockComponent> components=new ArrayList<>(1);
components.add(comp);
LockRequest req=new LockRequest(components,"me","localhost");
req.setTxnid(txnid);
LockResponse res=txnHandler.lock(req);
assertTrue(res.getState() == LockState.ACQUIRED);
txnHandler.commitTxn(new CommitTxnRequest(txnid));
// Commit must clear the lock table.
assertEquals(0,txnHandler.numLocksInLockTable());
}
APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Aborting one of two open txns leaves it visible as ABORTED in
 * getOpenTxnsInfo and still listed by getOpenTxns until cleaned.
 */
@Test public void testAbortTxn() throws Exception {
OpenTxnsResponse openedTxns=txnHandler.openTxns(new OpenTxnRequest(2,"me","localhost"));
List<Long> txnList=openedTxns.getTxn_ids();
long first=txnList.get(0);
assertEquals(1L,first);
long second=txnList.get(1);
assertEquals(2L,second);
txnHandler.abortTxn(new AbortTxnRequest(1));
GetOpenTxnsInfoResponse txnsInfo=txnHandler.getOpenTxnsInfo();
assertEquals(2L,txnsInfo.getTxn_high_water_mark());
assertEquals(2,txnsInfo.getOpen_txns().size());
assertEquals(1L,txnsInfo.getOpen_txns().get(0).getId());
assertEquals(TxnState.ABORTED,txnsInfo.getOpen_txns().get(0).getState());
assertEquals(2L,txnsInfo.getOpen_txns().get(1).getId());
assertEquals(TxnState.OPEN,txnsInfo.getOpen_txns().get(1).getState());
GetOpenTxnsResponse txns=txnHandler.getOpenTxns();
assertEquals(2L,txns.getTxn_high_water_mark());
assertEquals(2,txns.getOpen_txns().size());
// Mark off which txn ids (1 and 2) appear in the open-txn list.
boolean[] saw=new boolean[3];
for (int i=0; i < saw.length; i++) saw[i]=false;
for ( Long tid : txns.getOpen_txns()) {
saw[tid.intValue()]=true;
}
for (int i=1; i < saw.length; i++) assertTrue(saw[i]);
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * A table-level minor compaction request with an explicit run-as user shows
 * up as initiated, with no partition and a zero start time.
 */
@Test public void testCompactMinorNoPartition() throws Exception {
CompactionRequest rqst=new CompactionRequest("foo","bar",CompactionType.MINOR);
rqst.setRunas("fred");
txnHandler.compact(rqst);
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List<ShowCompactResponseElement> compacts=rsp.getCompacts();
assertEquals(1,compacts.size());
ShowCompactResponseElement c=compacts.get(0);
assertEquals("foo",c.getDbname());
assertEquals("bar",c.getTablename());
assertNull(c.getPartitionname());
assertEquals(CompactionType.MINOR,c.getType());
assertEquals("initiated",c.getState());
// Not yet picked up by any worker, so no start time is recorded.
assertEquals(0L,c.getStart());
assertEquals("fred",c.getRunAs());
}
InternalCallVerifier EqualityVerifier
/**
 * Heartbeating a range that includes one aborted transaction reports that
 * txn in the aborted set and nothing in the nosuch set.
 */
@Test public void heartbeatTxnRangeOneAborted() throws Exception {
long firstTxn=openTxn();
assertEquals(1,firstTxn);
openTxn();
openTxn();
txnHandler.abortTxn(new AbortTxnRequest(3));
HeartbeatTxnRangeResponse response=txnHandler.heartbeatTxnRange(new HeartbeatTxnRangeRequest(1,3));
assertEquals(1,response.getAbortedSize());
Long abortedTxn=response.getAborted().iterator().next();
assertEquals(3L,(long)abortedTxn);
assertEquals(0,response.getNosuch().size());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Locks taken inside a transaction must be released when the txn aborts.
 */
@Test public void testUnlockOnAbort() throws Exception {
long txnid=openTxn();
LockComponent comp=new LockComponent(LockType.SHARED_WRITE,LockLevel.DB,"mydb");
List<LockComponent> components=new ArrayList<>(1);
components.add(comp);
LockRequest req=new LockRequest(components,"me","localhost");
req.setTxnid(txnid);
LockResponse res=txnHandler.lock(req);
assertTrue(res.getState() == LockState.ACQUIRED);
txnHandler.abortTxn(new AbortTxnRequest(txnid));
// Abort must clear the lock table.
assertEquals(0,txnHandler.numLocksInLockTable());
}
InternalCallVerifier EqualityVerifier
/**
 * With every opened txn committed, both views of open transactions must be
 * empty while the high watermark reflects the ids that were handed out.
 */
@Test public void testValidTxnsNoneOpen() throws Exception {
  // Open two txns and commit both, leaving nothing open.
  txnHandler.openTxns(new OpenTxnRequest(2, "me", "localhost"));
  for (long id = 1; id <= 2; id++) {
    txnHandler.commitTxn(new CommitTxnRequest(id));
  }
  GetOpenTxnsInfoResponse infoResponse = txnHandler.getOpenTxnsInfo();
  assertEquals(2L, infoResponse.getTxn_high_water_mark());
  assertEquals(0, infoResponse.getOpen_txns().size());
  // The compact view must agree with the detailed one.
  GetOpenTxnsResponse openResponse = txnHandler.getOpenTxns();
  assertEquals(2L, openResponse.getTxn_high_water_mark());
  assertEquals(0, openResponse.getOpen_txns().size());
}
InternalCallVerifier EqualityVerifier
/**
 * Heartbeating a range containing a committed (hence no-longer-existing) txn
 * must report that txn under "nosuch" and report nothing aborted.
 */
@Test public void heartbeatTxnRangeOneCommitted() throws Exception {
  // First id must be 1 so committing txn 1 and ranging [1,3] is deterministic.
  assertEquals(1, openTxn());
  txnHandler.commitTxn(new CommitTxnRequest(1));
  openTxn();
  openTxn();
  HeartbeatTxnRangeResponse resp = txnHandler.heartbeatTxnRange(new HeartbeatTxnRangeRequest(1, 3));
  assertEquals(1, resp.getNosuchSize());
  // The committed txn is the one reported as unknown.
  assertEquals(1L, (long) resp.getNosuch().iterator().next());
  assertEquals(0, resp.getAborted().size());
}
IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Opening two transactions must hand out sequential ids starting at 1, and
 * both the detailed and the compact open-txn views must report them.
 */
@Test public void testOpenTxn() throws Exception {
  long first = openTxn();
  assertEquals(1L, first);
  long second = openTxn();
  assertEquals(2L, second);
  GetOpenTxnsInfoResponse txnsInfo = txnHandler.getOpenTxnsInfo();
  assertEquals(2L, txnsInfo.getTxn_high_water_mark());
  assertEquals(2, txnsInfo.getOpen_txns().size());
  // Entries are expected in id order with state OPEN.
  assertEquals(1L, txnsInfo.getOpen_txns().get(0).getId());
  assertEquals(TxnState.OPEN, txnsInfo.getOpen_txns().get(0).getState());
  assertEquals(2L, txnsInfo.getOpen_txns().get(1).getId());
  assertEquals(TxnState.OPEN, txnsInfo.getOpen_txns().get(1).getState());
  // User/host as supplied by openTxn() — presumably "me"/"localhost".
  assertEquals("me", txnsInfo.getOpen_txns().get(1).getUser());
  assertEquals("localhost", txnsInfo.getOpen_txns().get(1).getHostname());
  GetOpenTxnsResponse txns = txnHandler.getOpenTxns();
  assertEquals(2L, txns.getTxn_high_water_mark());
  assertEquals(2, txns.getOpen_txns().size());
  // Java zero-initializes boolean arrays, so the original explicit
  // false-fill loop was redundant and has been removed.
  boolean[] saw = new boolean[3];
  for (Long tid : txns.getOpen_txns()) {
    saw[tid.intValue()] = true;
  }
  for (int i = 1; i < saw.length; i++) {
    assertTrue("Didn't see txn " + i, saw[i]);
  }
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * A single LockRequest with two exclusive components (different partitions of
 * the same table) must be acquired atomically, survive a checkLock, and be
 * fully released by one unlock call.
 */
@Test public void testMultipleLock() throws Exception {
  LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
  comp.setTablename("mytable");
  comp.setPartitionname("mypartition");
  // Parameterized list (was raw) to eliminate unchecked warnings.
  List<LockComponent> components = new ArrayList<>(2);
  components.add(comp);
  comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
  comp.setTablename("mytable");
  comp.setPartitionname("anotherpartition");
  components.add(comp);
  LockRequest req = new LockRequest(components, "me", "localhost");
  LockResponse res = txnHandler.lock(req);
  long lockid = res.getLockid();
  // assertEquals reports the actual state on failure, unlike assertTrue(==).
  assertEquals(LockState.ACQUIRED, res.getState());
  // Re-checking the lock must still see it held.
  res = txnHandler.checkLock(new CheckLockRequest(lockid));
  assertEquals(LockState.ACQUIRED, res.getState());
  // One unlock releases both components.
  txnHandler.unlock(new UnlockRequest(lockid));
  assertEquals(0, txnHandler.numLocksInLockTable());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Queuing a MAJOR compaction for a specific partition must show up in
 * SHOW COMPACTIONS as "initiated" with that partition name.
 */
@Test public void testCompactMajorWithPartition() throws Exception {
  CompactionRequest rqst = new CompactionRequest("foo", "bar", CompactionType.MAJOR);
  rqst.setPartitionname("ds=today");
  txnHandler.compact(rqst);
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  // Parameterized List (was raw): with a raw List, get(0) yields Object and
  // cannot be assigned to ShowCompactResponseElement without a cast.
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  assertEquals(1, compacts.size());
  ShowCompactResponseElement c = compacts.get(0);
  assertEquals("foo", c.getDbname());
  assertEquals("bar", c.getTablename());
  assertEquals("ds=today", c.getPartitionname());
  assertEquals(CompactionType.MAJOR, c.getType());
  // Freshly queued compactions are "initiated" and have no start time yet.
  assertEquals("initiated", c.getState());
  assertEquals(0L, c.getStart());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * With txn 1 aborted, txn 2 committed, and txn 3 still open, both views must
 * report exactly txns 1 (ABORTED) and 3 (OPEN); committed txns vanish.
 */
@Test public void testValidTxnsSomeOpen() throws Exception {
  txnHandler.openTxns(new OpenTxnRequest(3, "me", "localhost"));
  txnHandler.abortTxn(new AbortTxnRequest(1));
  txnHandler.commitTxn(new CommitTxnRequest(2));
  GetOpenTxnsInfoResponse txnsInfo = txnHandler.getOpenTxnsInfo();
  assertEquals(3L, txnsInfo.getTxn_high_water_mark());
  assertEquals(2, txnsInfo.getOpen_txns().size());
  assertEquals(1L, txnsInfo.getOpen_txns().get(0).getId());
  assertEquals(TxnState.ABORTED, txnsInfo.getOpen_txns().get(0).getState());
  assertEquals(3L, txnsInfo.getOpen_txns().get(1).getId());
  assertEquals(TxnState.OPEN, txnsInfo.getOpen_txns().get(1).getState());
  GetOpenTxnsResponse txns = txnHandler.getOpenTxns();
  assertEquals(3L, txns.getTxn_high_water_mark());
  assertEquals(2, txns.getOpen_txns().size());
  // Java zero-initializes boolean arrays, so the original explicit
  // false-fill loop was redundant and has been removed.
  boolean[] saw = new boolean[4];
  for (Long tid : txns.getOpen_txns()) {
    saw[tid.intValue()] = true;
  }
  assertTrue(saw[1]);
  assertFalse(saw[2]);
  assertTrue(saw[3]);
}
Class: org.apache.hadoop.hive.metastore.txn.TestValidCompactorTxnList InternalCallVerifier EqualityVerifier
/**
 * Range entirely above the high watermark (5) with no exception txns:
 * nothing in [7,9] is valid for the compactor.
 */
@Test public void minTxnHighNoExceptions(){
  ValidTxnList list = new ValidCompactorTxnList(new long[0], -1, 5);
  Assert.assertEquals(ValidTxnList.RangeResponse.NONE, list.isTxnRangeValid(7, 9));
}
InternalCallVerifier EqualityVerifier
/**
 * Range above the high watermark (5) even though there are open txns below:
 * nothing in [7,9] is valid for the compactor.
 */
@Test public void minTxnHigh(){
  ValidTxnList list = new ValidCompactorTxnList(new long[]{3, 4}, 3, 5);
  Assert.assertEquals(ValidTxnList.RangeResponse.NONE, list.isTxnRangeValid(7, 9));
}
InternalCallVerifier EqualityVerifier
/**
 * Exception txns {3,6} with min-open 3 all sit below the queried range [7,9];
 * the compactor list still rejects the whole range (NONE).
 */
@Test public void exceptionsAllBelow(){
  ValidTxnList list = new ValidCompactorTxnList(new long[]{3, 6}, 3, 15);
  Assert.assertEquals(ValidTxnList.RangeResponse.NONE, list.isTxnRangeValid(7, 9));
}
InternalCallVerifier EqualityVerifier
/**
 * All exception txns {13,14} lie above the queried range [7,9], so every txn
 * in the range is valid (ALL).
 */
@Test public void maxTxnLow(){
  ValidTxnList list = new ValidCompactorTxnList(new long[]{13, 14}, 13, 15);
  Assert.assertEquals(ValidTxnList.RangeResponse.ALL, list.isTxnRangeValid(7, 9));
}
EqualityVerifier
/**
 * Serialization format is "hwm:minOpen:exc1:exc2:..."; exception txns are
 * written in sorted order regardless of construction order.
 */
@Test public void writeToString(){
  // {9,7,10} serializes sorted as 7:9:10 after the hwm (37) and min-open (9).
  Assert.assertEquals("37:9:7:9:10",
      new ValidCompactorTxnList(new long[]{9, 7, 10}, 9, 37).writeToString());
  // Default construction: unbounded hwm, no min-open txn, no exceptions.
  Assert.assertEquals(Long.MAX_VALUE + ":-1:",
      new ValidCompactorTxnList().writeToString());
  // Explicit empty exception array.
  Assert.assertEquals("23:-1:",
      new ValidCompactorTxnList(new long[0], -1, 23).writeToString());
}
InternalCallVerifier EqualityVerifier
/**
 * An open txn (8) inside the queried range [7,9] invalidates the whole range
 * for the compactor (NONE).
 */
@Test public void exceptionsInMidst(){
  ValidTxnList list = new ValidCompactorTxnList(new long[]{8}, 8, 15);
  Assert.assertEquals(ValidTxnList.RangeResponse.NONE, list.isTxnRangeValid(7, 9));
}
InternalCallVerifier EqualityVerifier
/**
 * No exception txns and a high watermark (15) above the queried range [7,9]:
 * every txn in the range is valid (ALL).
 */
@Test public void maxTxnLowNoExceptions(){
  ValidTxnList list = new ValidCompactorTxnList(new long[0], -1, 15);
  Assert.assertEquals(ValidTxnList.RangeResponse.ALL, list.isTxnRangeValid(7, 9));
}
InternalCallVerifier EqualityVerifier
/**
 * Parsing the "hwm:minOpen:exceptions..." string form must recover the high
 * watermark, min-open txn, and (sorted) invalid txn list.
 */
@Test public void readFromString(){
  ValidCompactorTxnList parsed = new ValidCompactorTxnList("37:9:7:9:10");
  Assert.assertEquals(37L, parsed.getHighWatermark());
  Assert.assertEquals(9L, parsed.getMinOpenTxn());
  Assert.assertArrayEquals(new long[]{7L, 9L, 10L}, parsed.getInvalidTransactions());
  // No exception list after the trailing colon.
  parsed = new ValidCompactorTxnList("21:-1:");
  Assert.assertEquals(21L, parsed.getHighWatermark());
  Assert.assertEquals(-1L, parsed.getMinOpenTxn());
  Assert.assertEquals(0, parsed.getInvalidTransactions().length);
}
Class: org.apache.hadoop.hive.ql.TestCreateUdfEntities InternalCallVerifier EqualityVerifier
/**
 * CREATE FUNCTION with an HDFS jar resource must register three write
 * entities: the database, the function, and the DFS resource directory.
 */
@Test public void testUdfWithDfsResource() throws Exception {
  int rc = driver.compile("CREATE FUNCTION default." + funcName + " AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf' "+ " using file '"+ "hdfs:///tmp/udf1.jar'");
  assertEquals(0, rc);
  // Java-style array declaration (was C-style "WriteEntity outputEntities[]").
  WriteEntity[] outputEntities = driver.getPlan().getOutputs().toArray(new WriteEntity[]{});
  // Fixed argument order: JUnit's assertEquals takes (expected, actual);
  // the original had them reversed, garbling failure messages.
  assertEquals(3, outputEntities.length);
  assertEquals(Entity.Type.DATABASE, outputEntities[0].getType());
  assertEquals("default", outputEntities[0].getDatabase().getName());
  assertEquals(Entity.Type.FUNCTION, outputEntities[1].getType());
  assertEquals(funcName, outputEntities[1].getFunctionName());
  // hdfs:// resources are classified as DFS_DIR entities.
  assertEquals(Entity.Type.DFS_DIR, outputEntities[2].getType());
  assertEquals("hdfs:///tmp/udf1.jar", outputEntities[2].getLocation().toString());
}
InternalCallVerifier EqualityVerifier
/**
 * CREATE FUNCTION with a local (file://) jar resource must register three
 * write entities: the database, the function, and the local resource dir.
 */
@Test public void testUdfWithLocalResource() throws Exception {
  int rc = driver.compile("CREATE FUNCTION " + funcName + " AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf' "+ " using file '"+ "file:///tmp/udf1.jar'");
  assertEquals(0, rc);
  // Java-style array declaration (was C-style "WriteEntity outputEntities[]").
  WriteEntity[] outputEntities = driver.getPlan().getOutputs().toArray(new WriteEntity[]{});
  // Fixed argument order: JUnit's assertEquals takes (expected, actual);
  // the original had them reversed, garbling failure messages.
  assertEquals(3, outputEntities.length);
  assertEquals(Entity.Type.DATABASE, outputEntities[0].getType());
  assertEquals("default", outputEntities[0].getDatabase().getName());
  assertEquals(Entity.Type.FUNCTION, outputEntities[1].getType());
  assertEquals(funcName, outputEntities[1].getFunctionName());
  // file:// resources are classified as LOCAL_DIR entities.
  assertEquals(Entity.Type.LOCAL_DIR, outputEntities[2].getType());
  assertEquals("file:///tmp/udf1.jar", outputEntities[2].getLocation().toString());
}
Class: org.apache.hadoop.hive.ql.TestTxnCommands EqualityVerifier
/**
 * DELETE ... WHERE a IN (subquery) on an ACID table: rows matching keys in
 * the second table are removed and replaced by that table's rows.
 */
@Test public void testDeleteIn() throws Exception {
  // IN-subquery delete against an empty table is a no-op but must run.
  runStatementOnDriver("delete from " + Table.ACIDTBL + " where a IN (SELECT A.a from " + Table.ACIDTBL + " A)");
  int[][] seedRows = {{1,2},{3,2},{5,2},{1,3},{3,3},{5,3}};
  runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) " + makeValuesClause(seedRows));
  runStatementOnDriver("insert into " + Table.ACIDTBL2 + "(a,b,c) values(1,7,17),(3,7,17)");
  // Drop every row whose key appears in ACIDTBL2, then pull ACIDTBL2's rows in.
  runStatementOnDriver("delete from " + Table.ACIDTBL + " where a in(select a from " + Table.ACIDTBL2 + ")");
  runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) select a,b from " + Table.ACIDTBL2);
  List actual = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  int[][] expectedRows = {{1,7},{3,7},{5,2},{5,3}};
  Assert.assertEquals("Bulk update failed", stringifyValues(expectedRows), actual);
}
APIUtilityVerifier EqualityVerifier
/**
 * Within one explicit transaction: insert, then update, then delete, checking
 * visibility of each change to the same transaction and after commit.
 */
@Test public void testUpdateDeleteOfInserts() throws Exception {
// Baseline: rows inserted in autocommit mode are immediately visible.
int[][] rows1={{1,2},{3,4}};
runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) "+ makeValuesClause(rows1));
List rs0=runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
Assert.assertEquals("Content didn't match rs0",stringifyValues(rows1),rs0);
// Open an explicit transaction and insert a second batch.
runStatementOnDriver("set autocommit false");
runStatementOnDriver("START TRANSACTION");
int[][] rows2={{5,6},{7,8}};
runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) "+ makeValuesClause(rows2));
// The txn must see both the pre-existing rows and its own uncommitted insert.
List rs1=runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
List allData=stringifyValues(rows1);
allData.addAll(stringifyValues(rows2));
Assert.assertEquals("Content didn't match rs1",allData,rs1);
// Update touches both committed and in-txn rows.
runStatementOnDriver("update " + Table.ACIDTBL + " set b = 1 where b != 1");
int[][] updatedData={{1,1},{3,1},{5,1},{7,1}};
List rs2=runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
Assert.assertEquals("Wrong data after update",stringifyValues(updatedData),rs2);
// Delete a row that was both inserted and updated inside this same txn.
runStatementOnDriver("delete from " + Table.ACIDTBL + " where a = 7 and b = 1");
// Diagnostic dumps of the table's delta files; no assertions here.
dumpTableData(Table.ACIDTBL,1,0);
dumpTableData(Table.ACIDTBL,2,0);
dumpTableData(Table.ACIDTBL,2,2);
dumpTableData(Table.ACIDTBL,2,4);
int[][] updatedData2={{1,1},{3,1},{5,1}};
List rs3=runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
Assert.assertEquals("Wrong data after delete",stringifyValues(updatedData2),rs3);
// After commit the same final state must be visible in autocommit mode.
runStatementOnDriver("commit");
runStatementOnDriver("set autocommit true");
List rs4=runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
Assert.assertEquals("Wrong data after commit",stringifyValues(updatedData2),rs4);
}
EqualityVerifier
/**
 * A statement failure inside an open transaction must implicitly roll the
 * transaction back, discarding its uncommitted insert.
 */
@Test public void testImplicitRollback() throws Exception {
  runStatementOnDriver("set autocommit false");
  runStatementOnDriver("START TRANSACTION");
  runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) values(1,2)");
  // Within the open txn our own insert must be visible.
  List insideTxn = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  Assert.assertEquals("Can't see my own write", 1, insideTxn.size());
  // Referencing a nonexistent table fails the statement and aborts the txn.
  CommandProcessorResponse cpr = runStatementOnDriverNegative("select * from no_such_table");
  Assert.assertEquals("Txn didn't fail?", "FAILED: SemanticException [Error 10001]: Line 1:14 Table not found 'no_such_table'", cpr.getErrorMessage());
  // A new txn must not see the rolled-back insert.
  runStatementOnDriver("start transaction");
  List afterFailure = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  runStatementOnDriver("commit");
  Assert.assertEquals("Didn't rollback as expected", 0, afterFailure.size());
}
EqualityVerifier
/**
 * Multiple deletes followed by an update within one explicit transaction;
 * each mutation must be visible to the transaction that made it and the
 * final state must survive commit.
 */
@Test public void testMultipleDelete() throws Exception {
// Baseline rows inserted in autocommit mode.
int[][] rows1={{1,2},{3,4},{5,6},{7,8}};
runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) "+ makeValuesClause(rows1));
List rs0=runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
Assert.assertEquals("Content didn't match rs0",stringifyValues(rows1),rs0);
runStatementOnDriver("set autocommit false");
runStatementOnDriver("START TRANSACTION");
// First delete inside the txn.
runStatementOnDriver("delete from " + Table.ACIDTBL + " where b = 8");
int[][] updatedData2={{1,2},{3,4},{5,6}};
List rs2=runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
Assert.assertEquals("Wrong data after delete",stringifyValues(updatedData2),rs2);
// Second delete in the same txn.
runStatementOnDriver("delete from " + Table.ACIDTBL + " where b = 4");
int[][] updatedData3={{1,2},{5,6}};
List rs3=runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
Assert.assertEquals("Wrong data after delete2",stringifyValues(updatedData3),rs3);
// Unqualified update rewrites the two surviving rows.
runStatementOnDriver("update " + Table.ACIDTBL + " set b=3");
// Diagnostic dumps of the table's delta files; no assertions here.
dumpTableData(Table.ACIDTBL,1,0);
dumpTableData(Table.ACIDTBL,2,0);
dumpTableData(Table.ACIDTBL,2,2);
dumpTableData(Table.ACIDTBL,2,4);
List rs5=runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
int[][] updatedData4={{1,3},{5,3}};
Assert.assertEquals("Wrong data after delete",stringifyValues(updatedData4),rs5);
// The same final state must be visible after commit in autocommit mode.
runStatementOnDriver("commit");
runStatementOnDriver("set autocommit true");
List rs4=runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
Assert.assertEquals("Wrong data after commit",stringifyValues(updatedData4),rs4);
}
EqualityVerifier
/**
 * An explicit ROLLBACK must discard the transaction's uncommitted insert,
 * leaving the table empty.
 */
@Test public void testExplicitRollback() throws Exception {
  runStatementOnDriver("set autocommit false");
  runStatementOnDriver("START TRANSACTION");
  runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) values(1,2)");
  // Undo the insert before it is ever committed.
  runStatementOnDriver("ROLLBACK");
  runStatementOnDriver("set autocommit true");
  List remaining = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  Assert.assertEquals("Rollback didn't rollback", 0, remaining.size());
}
APIUtilityVerifier EqualityVerifier
/**
 * Two separate inserts inside one explicit transaction must both be visible
 * to that transaction, and to other readers after commit.
 */
@Test public void testMultipleInserts() throws Exception {
  runStatementOnDriver("set autocommit false");
  runStatementOnDriver("START TRANSACTION");
  int[][] batch1 = {{1,2},{3,4}};
  runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) " + makeValuesClause(batch1));
  int[][] batch2 = {{5,6},{7,8}};
  runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) " + makeValuesClause(batch2));
  List expected = stringifyValues(batch1);
  expected.addAll(stringifyValues(batch2));
  // The txn must see both of its own uncommitted inserts.
  List beforeCommit = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  Assert.assertEquals("Content didn't match before commit rs", expected, beforeCommit);
  runStatementOnDriver("commit");
  // Diagnostic dumps of the table's delta files; no assertions here.
  dumpTableData(Table.ACIDTBL, 1, 0);
  dumpTableData(Table.ACIDTBL, 1, 1);
  runStatementOnDriver("set autocommit true");
  // The same data must be visible after commit in autocommit mode.
  List afterCommit = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  Assert.assertEquals("Content didn't match after commit rs1", expected, afterCommit);
}
APIUtilityVerifier EqualityVerifier
/**
 * Insert into an ACID table both in autocommit mode and inside an explicit
 * transaction; verify visibility before and after COMMIT WORK.
 */
@Test public void testSimpleAcidInsert() throws Exception {
  int[][] rows1 = {{1,2},{3,4}};
  runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) "+ makeValuesClause(rows1));
  runStatementOnDriver("set autocommit false");
  runStatementOnDriver("START TRANSACTION");
  int[][] rows2 = {{5,6},{7,8}};
  runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) "+ makeValuesClause(rows2));
  List allData = stringifyValues(rows1);
  allData.addAll(stringifyValues(rows2));
  // The txn must see both the committed rows and its own uncommitted insert.
  List rs0 = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  Assert.assertEquals("Data didn't match inside tx (rs0)", allData, rs0);
  runStatementOnDriver("COMMIT WORK");
  // Diagnostic dumps of the table's delta files; no assertions here.
  dumpTableData(Table.ACIDTBL, 1, 0);
  dumpTableData(Table.ACIDTBL, 2, 0);
  runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  runStatementOnDriver("COMMIT");
  runStatementOnDriver("set autocommit true");
  List rs1 = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  // Fixed copy-pasted assertion message: this checks rs1 after commit,
  // not rs0 inside the transaction.
  Assert.assertEquals("Data didn't match after commit (rs1)", allData, rs1);
}
EqualityVerifier
/**
 * A delete inside an explicit transaction must be visible to that transaction
 * and persist after commit.
 */
@Test public void testDelete() throws Exception {
  int[][] seedRows = {{1,2},{3,4}};
  runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) " + makeValuesClause(seedRows));
  // Sanity-check the baseline content before opening the txn.
  List baseline = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  Assert.assertEquals("Content didn't match rs0", stringifyValues(seedRows), baseline);
  runStatementOnDriver("set autocommit false");
  runStatementOnDriver("START TRANSACTION");
  runStatementOnDriver("delete from " + Table.ACIDTBL + " where b = 4");
  int[][] survivors = {{1,2}};
  // The txn must see its own delete.
  List insideTxn = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  Assert.assertEquals("Wrong data after delete", stringifyValues(survivors), insideTxn);
  runStatementOnDriver("commit");
  runStatementOnDriver("set autocommit true");
  // The delete must persist after commit.
  List afterCommit = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  Assert.assertEquals("Wrong data after commit", stringifyValues(survivors), afterCommit);
}
EqualityVerifier
/**
 * A transaction must see its own uncommitted insert, and a later transaction
 * must see the row once the first has committed (even if the reader rolls
 * back — rollback of a read-only txn discards nothing).
 */
@Test public void testReadMyOwnInsert() throws Exception {
  runStatementOnDriver("set autocommit false");
  runStatementOnDriver("START TRANSACTION");
  // Table starts empty.
  List initial = runStatementOnDriver("select * from " + Table.ACIDTBL);
  Assert.assertEquals("Expected empty " + Table.ACIDTBL, 0, initial.size());
  runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) values(1,2)");
  // Our own uncommitted write is visible to us.
  List insideTxn = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  Assert.assertEquals("Can't see my own write", 1, insideTxn.size());
  runStatementOnDriver("commit");
  // A fresh txn sees the committed row; rolling it back changes nothing.
  runStatementOnDriver("START TRANSACTION");
  List afterCommit = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  runStatementOnDriver("rollback work");
  Assert.assertEquals("Can't see write after commit", 1, afterCommit.size());
}
APIUtilityVerifier EqualityVerifier
/**
 * An update inside an explicit transaction must rewrite both pre-existing
 * committed rows and rows inserted by the same transaction.
 */
@Test public void testUpdateOfInserts() throws Exception {
  int[][] committedRows = {{1,2},{3,4}};
  runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) " + makeValuesClause(committedRows));
  // Sanity-check the baseline content before opening the txn.
  List baseline = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  Assert.assertEquals("Content didn't match rs0", stringifyValues(committedRows), baseline);
  runStatementOnDriver("set autocommit false");
  runStatementOnDriver("START TRANSACTION");
  int[][] txnRows = {{5,6},{7,8}};
  runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) " + makeValuesClause(txnRows));
  // The txn sees both the committed and its own uncommitted rows.
  List insideTxn = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  List combined = stringifyValues(committedRows);
  combined.addAll(stringifyValues(txnRows));
  Assert.assertEquals("Content didn't match rs1", combined, insideTxn);
  // The update must touch every row, whatever its origin.
  runStatementOnDriver("update " + Table.ACIDTBL + " set b = 1 where b != 1");
  int[][] rewritten = {{1,1},{3,1},{5,1},{7,1}};
  List afterUpdate = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  Assert.assertEquals("Wrong data after update", stringifyValues(rewritten), afterUpdate);
  runStatementOnDriver("commit");
  runStatementOnDriver("set autocommit true");
  // The rewritten state must persist past commit.
  List afterCommit = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  Assert.assertEquals("Wrong data after commit", stringifyValues(rewritten), afterCommit);
}
InternalCallVerifier EqualityVerifier
/**
 * Error handling for txn statements issued in the wrong autocommit state.
 * TODO: add tests for all transitions - AC=t, AC=t, AC=f, commit (for example)
 * @throws Exception
 */
@Test public void testErrors() throws Exception {
  runStatementOnDriver("set autocommit true");
  // START TRANSACTION is illegal while autocommit is on.
  CommandProcessorResponse cpr = runStatementOnDriverNegative("start transaction");
  Assert.assertEquals("Error didn't match: " + cpr, ErrorMsg.OP_NOT_ALLOWED_IN_AUTOCOMMIT.getErrorCode(), cpr.getErrorCode());
  runStatementOnDriver("set autocommit false");
  runStatementOnDriver("start transaction");
  // DDL is not allowed inside an open transaction.
  CommandProcessorResponse cpr2 = runStatementOnDriverNegative("create table foo(x int, y int)");
  Assert.assertEquals("Expected DDL to fail in an open txn", ErrorMsg.OP_NOT_ALLOWED_IN_TXN.getErrorCode(), cpr2.getErrorCode());
  runStatementOnDriver("set autocommit true");
  // Updating a bucketing column is rejected outright.
  CommandProcessorResponse cpr3 = runStatementOnDriverNegative("update " + Table.ACIDTBL + " set a = 1 where b != 1");
  Assert.assertEquals("Expected update of bucket column to fail", "FAILED: SemanticException [Error 10302]: Updating values of bucketing columns is not supported. Column a.", cpr3.getErrorMessage());
  // BUG FIX: the following four asserts originally checked cpr.getErrorCode()
  // instead of cpr3.getErrorCode(), making them trivially true.
  // COMMIT/ROLLBACK WORK are illegal while autocommit is on.
  cpr3 = runStatementOnDriverNegative("commit work");
  Assert.assertEquals("Error didn't match: " + cpr3, ErrorMsg.OP_NOT_ALLOWED_IN_AUTOCOMMIT.getErrorCode(), cpr3.getErrorCode());
  cpr3 = runStatementOnDriverNegative("rollback work");
  Assert.assertEquals("Error didn't match: " + cpr3, ErrorMsg.OP_NOT_ALLOWED_IN_AUTOCOMMIT.getErrorCode(), cpr3.getErrorCode());
  runStatementOnDriver("set autocommit false");
  // COMMIT/ROLLBACK with no transaction open should fail with the
  // no-transaction error (NOTE(review): expected code assumed to be
  // OP_NOT_ALLOWED_WITHOUT_TXN here — confirm against ErrorMsg).
  cpr3 = runStatementOnDriverNegative("commit");
  Assert.assertEquals("Error didn't match: " + cpr3, ErrorMsg.OP_NOT_ALLOWED_WITHOUT_TXN.getErrorCode(), cpr3.getErrorCode());
  cpr3 = runStatementOnDriverNegative("rollback");
  Assert.assertEquals("Error didn't match: " + cpr3, ErrorMsg.OP_NOT_ALLOWED_WITHOUT_TXN.getErrorCode(), cpr3.getErrorCode());
  runStatementOnDriver("start transaction");
  // Nested START TRANSACTION is illegal.
  cpr3 = runStatementOnDriverNegative("start transaction");
  Assert.assertEquals("Expected start transaction to fail", ErrorMsg.OP_NOT_ALLOWED_IN_TXN.getErrorCode(), cpr3.getErrorCode());
  // The failed nested START aborted the txn; a new one can be opened.
  runStatementOnDriver("start transaction");
  runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) values(1,2)");
  List rs0 = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  Assert.assertEquals("Can't see my own write", 1, rs0.size());
  // Switching autocommit back on implicitly commits; the row survives.
  runStatementOnDriver("set autocommit true");
  rs0 = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  Assert.assertEquals("Can't see my own write", 1, rs0.size());
}
Class: org.apache.hadoop.hive.ql.TestTxnCommands2 EqualityVerifier
/**
 * Column references in UPDATE must be case-insensitive ("B"/"A" vs "b"/"a"),
 * including when the new value references the column itself.
 */
@Test public void testUpdateMixedCase() throws Exception {
  int[][] seedRows = {{1,2},{3,3},{5,3}};
  runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) " + makeValuesClause(seedRows));
  // Upper-case column names in SET and WHERE.
  runStatementOnDriver("update " + Table.ACIDTBL + " set B = 7 where A=1");
  List afterFirst = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  int[][] expectedFirst = {{1,7},{3,3},{5,3}};
  Assert.assertEquals("Update failed", stringifyValues(expectedFirst), afterFirst);
  // Self-referencing expression with mixed-case column names.
  runStatementOnDriver("update " + Table.ACIDTBL + " set B = B + 1 where A=1");
  List afterSecond = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
  int[][] expectedSecond = {{1,8},{3,3},{5,3}};
  Assert.assertEquals("Update failed", stringifyValues(expectedSecond), afterSecond);
}
EqualityVerifier
/**
 * Reading a bucketed ACID partition via INSERT ... SELECT into both an ACID
 * and a non-ACID table must preserve the data.
 * https://issues.apache.org/jira/browse/HIVE-10151
 */
@Test public void testBucketizedInputFormat() throws Exception {
  int[][] seedRows = {{1,2}};
  runStatementOnDriver("insert into " + Table.ACIDTBLPART + " partition(p=1) (a,b) " + makeValuesClause(seedRows));
  // Copy the partition's rows into an ACID table and verify.
  runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) select a,b from " + Table.ACIDTBLPART + " where p = 1");
  List acidCopy = runStatementOnDriver("select a,b from " + Table.ACIDTBL);
  Assert.assertEquals("Insert into " + Table.ACIDTBL + " didn't match:", stringifyValues(seedRows), acidCopy);
  // Copy the same rows into a non-ACID ORC table and verify.
  runStatementOnDriver("insert into " + Table.NONACIDORCTBL + "(a,b) select a,b from " + Table.ACIDTBLPART + " where p = 1");
  List plainCopy = runStatementOnDriver("select a,b from " + Table.NONACIDORCTBL);
  Assert.assertEquals("Insert into " + Table.NONACIDORCTBL + " didn't match:", stringifyValues(seedRows), plainCopy);
}
APIUtilityVerifier IterativeVerifier BranchVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Test the query correctness and directory layout after ACID table conversion and MAJOR compaction
 *
 * Phases: (1) insert into a flat ORC table and check the original bucket-file
 * layout; (2) flip the table to transactional and verify nothing moved;
 * (3) insert again and expect a delta_* directory; (4) run a MAJOR compaction
 * (Worker) and expect a base_* directory; (5) run the Cleaner and expect only
 * the base to survive. Data correctness is re-verified after every phase.
 * @throws Exception
 */
@Test public void testNonAcidToAcidConversionAndMajorCompaction() throws Exception {
FileSystem fs=FileSystem.get(hiveConf);
FileStatus[] status;
// Phase 1: insert into the table while it is still non-ACID.
runStatementOnDriver("insert into " + Table.NONACIDORCTBL + "(a,b) values(1,2)");
status=fs.listStatus(new Path(TEST_WAREHOUSE_DIR + "/" + (Table.NONACIDORCTBL).toString().toLowerCase()),FileUtils.STAGING_DIR_PATH_FILTER);
// Flat insert produces one 00000N_0 file per bucket, no delta dirs.
Assert.assertEquals(BUCKET_COUNT,status.length);
for (int i=0; i < status.length; i++) {
Assert.assertTrue(status[i].getPath().getName().matches("00000[01]_0"));
}
List rs=runStatementOnDriver("select a,b from " + Table.NONACIDORCTBL);
int[][] resultData=new int[][]{{1,2}};
Assert.assertEquals(stringifyValues(resultData),rs);
rs=runStatementOnDriver("select count(*) from " + Table.NONACIDORCTBL);
int resultCount=1;
Assert.assertEquals(resultCount,Integer.parseInt(rs.get(0)));
// Phase 2: convert to transactional; layout and data must be unchanged.
runStatementOnDriver("alter table " + Table.NONACIDORCTBL + " SET TBLPROPERTIES ('transactional'='true')");
status=fs.listStatus(new Path(TEST_WAREHOUSE_DIR + "/" + (Table.NONACIDORCTBL).toString().toLowerCase()),FileUtils.STAGING_DIR_PATH_FILTER);
Assert.assertEquals(BUCKET_COUNT,status.length);
for (int i=0; i < status.length; i++) {
Assert.assertTrue(status[i].getPath().getName().matches("00000[01]_0"));
}
rs=runStatementOnDriver("select a,b from " + Table.NONACIDORCTBL);
resultData=new int[][]{{1,2}};
Assert.assertEquals(stringifyValues(resultData),rs);
rs=runStatementOnDriver("select count(*) from " + Table.NONACIDORCTBL);
resultCount=1;
Assert.assertEquals(resultCount,Integer.parseInt(rs.get(0)));
// Phase 3: an insert into the now-ACID table must create a delta directory
// alongside the two original flat files (3 entries total).
runStatementOnDriver("insert into " + Table.NONACIDORCTBL + "(a,b) values(3,4)");
status=fs.listStatus(new Path(TEST_WAREHOUSE_DIR + "/" + (Table.NONACIDORCTBL).toString().toLowerCase()),FileUtils.STAGING_DIR_PATH_FILTER);
Assert.assertEquals(3,status.length);
boolean sawNewDelta=false;
for (int i=0; i < status.length; i++) {
if (status[i].getPath().getName().matches("delta_.*")) {
sawNewDelta=true;
// The delta dir holds one bucket_0000N file per bucket.
FileStatus[] buckets=fs.listStatus(status[i].getPath(),FileUtils.STAGING_DIR_PATH_FILTER);
Assert.assertEquals(BUCKET_COUNT,buckets.length);
Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
}
else {
// The other entries are the untouched pre-conversion flat files.
Assert.assertTrue(status[i].getPath().getName().matches("00000[01]_0"));
}
}
Assert.assertTrue(sawNewDelta);
rs=runStatementOnDriver("select a,b from " + Table.NONACIDORCTBL);
resultData=new int[][]{{1,2},{3,4}};
Assert.assertEquals(stringifyValues(resultData),rs);
rs=runStatementOnDriver("select count(*) from " + Table.NONACIDORCTBL);
resultCount=2;
Assert.assertEquals(resultCount,Integer.parseInt(rs.get(0)));
// Phase 4: request a MAJOR compaction and run one Worker pass synchronously
// (stop flag pre-set so run() performs a single iteration).
runStatementOnDriver("alter table " + Table.NONACIDORCTBL + " compact 'MAJOR'");
Worker w=new Worker();
w.setThreadId((int)w.getId());
w.setHiveConf(hiveConf);
AtomicBoolean stop=new AtomicBoolean();
AtomicBoolean looped=new AtomicBoolean();
stop.set(true);
w.init(stop,looped);
w.run();
// Compaction adds a base_* dir; old files remain until the Cleaner runs.
status=fs.listStatus(new Path(TEST_WAREHOUSE_DIR + "/" + (Table.NONACIDORCTBL).toString().toLowerCase()),FileUtils.STAGING_DIR_PATH_FILTER);
Assert.assertEquals(4,status.length);
boolean sawNewBase=false;
for (int i=0; i < status.length; i++) {
if (status[i].getPath().getName().matches("base_.*")) {
sawNewBase=true;
FileStatus[] buckets=fs.listStatus(status[i].getPath(),FileUtils.STAGING_DIR_PATH_FILTER);
Assert.assertEquals(BUCKET_COUNT,buckets.length);
Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
}
}
Assert.assertTrue(sawNewBase);
rs=runStatementOnDriver("select a,b from " + Table.NONACIDORCTBL);
resultData=new int[][]{{1,2},{3,4}};
Assert.assertEquals(stringifyValues(resultData),rs);
rs=runStatementOnDriver("select count(*) from " + Table.NONACIDORCTBL);
resultCount=2;
Assert.assertEquals(resultCount,Integer.parseInt(rs.get(0)));
// Phase 5: plant stray files in a subdir to verify the Cleaner removes
// obsolete pre-compaction files (including nested ones).
String fakeFile0=TEST_WAREHOUSE_DIR + "/" + (Table.NONACIDORCTBL).toString().toLowerCase()+ "/subdir/000000_0";
String fakeFile1=TEST_WAREHOUSE_DIR + "/" + (Table.NONACIDORCTBL).toString().toLowerCase()+ "/subdir/000000_1";
fs.create(new Path(fakeFile0));
fs.create(new Path(fakeFile1));
status=fs.listStatus(new Path(TEST_WAREHOUSE_DIR + "/" + (Table.NONACIDORCTBL).toString().toLowerCase()),FileUtils.STAGING_DIR_PATH_FILTER);
Assert.assertEquals(5,status.length);
// Single synchronous Cleaner pass, same stop-flag technique as the Worker.
Cleaner c=new Cleaner();
c.setThreadId((int)c.getId());
c.setHiveConf(hiveConf);
stop=new AtomicBoolean();
looped=new AtomicBoolean();
stop.set(true);
c.init(stop,looped);
c.run();
// Only the compacted base directory may survive the clean.
status=fs.listStatus(new Path(TEST_WAREHOUSE_DIR + "/" + (Table.NONACIDORCTBL).toString().toLowerCase()),FileUtils.STAGING_DIR_PATH_FILTER);
Assert.assertEquals(1,status.length);
Assert.assertTrue(status[0].getPath().getName().matches("base_.*"));
FileStatus[] buckets=fs.listStatus(status[0].getPath(),FileUtils.STAGING_DIR_PATH_FILTER);
Assert.assertEquals(BUCKET_COUNT,buckets.length);
Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
// Final data check: compaction + cleaning must not lose any rows.
rs=runStatementOnDriver("select a,b from " + Table.NONACIDORCTBL);
resultData=new int[][]{{1,2},{3,4}};
Assert.assertEquals(stringifyValues(resultData),rs);
rs=runStatementOnDriver("select count(*) from " + Table.NONACIDORCTBL);
resultCount=2;
Assert.assertEquals(resultCount,Integer.parseInt(rs.get(0)));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * HIVE-12353
 *
 * After COMPACTOR_INITIATOR_FAILED_THRESHOLD consecutive failed compactions
 * on a table, the Initiator must stop auto-scheduling new compactions for it,
 * while explicit (user-requested) compactions still run; also verifies the
 * compaction-history reaper's retention of failed entries.
 * @throws Exception
 */
@Test public void testInitiatorWithMultipleFailedCompactions() throws Exception {
String tblName="hive12353";
runStatementOnDriver("drop table if exists " + tblName);
runStatementOnDriver("CREATE TABLE " + tblName + "(a INT, b STRING) "+ " CLUSTERED BY(a) INTO 1 BUCKETS"+ " STORED AS ORC TBLPROPERTIES ('transactional'='true')");
// Low delta threshold so the Initiator would normally want to compact.
hiveConf.setIntVar(HiveConf.ConfVars.HIVE_COMPACTOR_DELTA_NUM_THRESHOLD,4);
for (int i=0; i < 5; i++) {
runStatementOnDriver("insert into " + tblName + " values("+ (i + 1)+ ", 'foo'),("+ (i + 2)+ ", 'bar'),("+ (i + 3)+ ", 'baz')");
}
// Force every compaction attempt to fail (test-mode switch).
hiveConf.setBoolVar(HiveConf.ConfVars.HIVETESTMODEFAILCOMPACTION,true);
int numFailedCompactions=hiveConf.getIntVar(HiveConf.ConfVars.COMPACTOR_INITIATOR_FAILED_THRESHOLD);
TxnStore txnHandler=TxnUtils.getTxnStore(hiveConf);
AtomicBoolean stop=new AtomicBoolean(true);
// Accumulate exactly 'threshold' failed compactions on the table.
for (int i=0; i < numFailedCompactions; i++) {
txnHandler.compact(new CompactionRequest("default",tblName,CompactionType.MINOR));
runWorker(hiveConf);
}
// One synchronous Initiator pass (stop flag pre-set => single iteration).
Initiator init=new Initiator();
init.setThreadId((int)init.getId());
init.setHiveConf(hiveConf);
init.init(stop,new AtomicBoolean());
init.run();
// The Initiator must NOT have queued anything: table is over the threshold.
CompactionsByState cbs=countCompacts(txnHandler);
Assert.assertEquals("Unexpected number of failed compactions",numFailedCompactions,cbs.failed);
Assert.assertEquals("Unexpected total number of compactions",numFailedCompactions,cbs.total);
// Run the history reaper; the failed entries are within retention and stay.
hiveConf.setTimeVar(HiveConf.ConfVars.COMPACTOR_HISTORY_REAPER_INTERVAL,10,TimeUnit.MILLISECONDS);
AcidCompactionHistoryService compactionHistoryService=new AcidCompactionHistoryService();
runHouseKeeperService(compactionHistoryService,hiveConf);
cbs=countCompacts(txnHandler);
Assert.assertEquals("Number of failed compactions after History clean",numFailedCompactions,cbs.failed);
Assert.assertEquals("Total number of compactions after History clean",numFailedCompactions,cbs.total);
// Explicit user requests still run (and fail) despite the threshold.
txnHandler.compact(new CompactionRequest("default",tblName,CompactionType.MAJOR));
runWorker(hiveConf);
txnHandler.compact(new CompactionRequest("default",tblName,CompactionType.MINOR));
runWorker(hiveConf);
cbs=countCompacts(txnHandler);
Assert.assertEquals("Unexpected num failed1",numFailedCompactions + 2,cbs.failed);
Assert.assertEquals("Unexpected num total1",numFailedCompactions + 2,cbs.total);
// The reaper now trims failed history down to the retention limit.
runHouseKeeperService(compactionHistoryService,hiveConf);
cbs=countCompacts(txnHandler);
Assert.assertEquals("Unexpected num failed2",hiveConf.getIntVar(HiveConf.ConfVars.COMPACTOR_HISTORY_RETENTION_FAILED),cbs.failed);
Assert.assertEquals("Unexpected num total2",hiveConf.getIntVar(HiveConf.ConfVars.COMPACTOR_HISTORY_RETENTION_FAILED),cbs.total);
// Let compactions succeed again and walk one through its lifecycle:
// initiated -> (worker) ready-to-clean -> (cleaner) succeeded.
hiveConf.setBoolVar(HiveConf.ConfVars.HIVETESTMODEFAILCOMPACTION,false);
txnHandler.compact(new CompactionRequest("default",tblName,CompactionType.MINOR));
cbs=countCompacts(txnHandler);
Assert.assertEquals("Unexpected num failed3",hiveConf.getIntVar(HiveConf.ConfVars.COMPACTOR_HISTORY_RETENTION_FAILED),cbs.failed);
Assert.assertEquals("Unexpected num initiated",1,cbs.initiated);
Assert.assertEquals("Unexpected num total3",hiveConf.getIntVar(HiveConf.ConfVars.COMPACTOR_HISTORY_RETENTION_FAILED) + 1,cbs.total);
runWorker(hiveConf);
cbs=countCompacts(txnHandler);
Assert.assertEquals("Unexpected num failed4",hiveConf.getIntVar(HiveConf.ConfVars.COMPACTOR_HISTORY_RETENTION_FAILED),cbs.failed);
Assert.assertEquals("Unexpected num ready to clean",1,cbs.readyToClean);
Assert.assertEquals("Unexpected num total4",hiveConf.getIntVar(HiveConf.ConfVars.COMPACTOR_HISTORY_RETENTION_FAILED) + 1,cbs.total);
runCleaner(hiveConf);
runHouseKeeperService(compactionHistoryService,hiveConf);
cbs=countCompacts(txnHandler);
Assert.assertEquals("Unexpected num failed5",hiveConf.getIntVar(HiveConf.ConfVars.COMPACTOR_HISTORY_RETENTION_FAILED),cbs.failed);
Assert.assertEquals("Unexpected num succeeded",1,cbs.succeeded);
Assert.assertEquals("Unexpected num total5",hiveConf.getIntVar(HiveConf.ConfVars.COMPACTOR_HISTORY_RETENTION_FAILED) + 1,cbs.total);
}
EqualityVerifier
/**
 * HIVE-12352 has details
 *
 * A write (here: an aborted delete) landing between the compaction Worker and
 * the Cleaner must not cause data loss: the Cleaner may not remove delta
 * files that a concurrent/aborted txn still references.
 * @throws Exception
 */
@Test public void writeBetweenWorkerAndCleaner() throws Exception {
String tblName="hive12352";
runStatementOnDriver("drop table if exists " + tblName);
runStatementOnDriver("CREATE TABLE " + tblName + "(a INT, b STRING) "+ " CLUSTERED BY(a) INTO 1 BUCKETS"+ " STORED AS ORC TBLPROPERTIES ('transactional'='true')");
// Seed data plus an update so there are deltas worth compacting.
runStatementOnDriver("insert into " + tblName + " values(1, 'foo'),(2, 'bar'),(3, 'baz')");
runStatementOnDriver("update " + tblName + " set b = 'blah' where a = 3");
// Queue a MINOR compaction and run one Worker pass synchronously
// (stop flag pre-set so run() performs a single iteration).
TxnStore txnHandler=TxnUtils.getTxnStore(hiveConf);
txnHandler.compact(new CompactionRequest("default",tblName,CompactionType.MINOR));
Worker t=new Worker();
t.setThreadId((int)t.getId());
t.setHiveConf(hiveConf);
AtomicBoolean stop=new AtomicBoolean(true);
AtomicBoolean looped=new AtomicBoolean();
t.init(stop,looped);
t.run();
// Between Worker and Cleaner: issue a delete whose txn is forced to roll
// back (test-mode switch), simulating an in-flight write.
hiveConf.setBoolVar(HiveConf.ConfVars.HIVETESTMODEROLLBACKTXN,true);
runStatementOnDriver("delete from " + tblName + " where a = 1");
hiveConf.setBoolVar(HiveConf.ConfVars.HIVETESTMODEROLLBACKTXN,false);
// The rolled-back delete must be invisible: all three rows remain.
List expected=new ArrayList<>();
expected.add("1\tfoo");
expected.add("2\tbar");
expected.add("3\tblah");
Assert.assertEquals("",expected,runStatementOnDriver("select a,b from " + tblName + " order by a"));
// Single synchronous Cleaner pass.
Cleaner c=new Cleaner();
c.setThreadId((int)c.getId());
c.setHiveConf(hiveConf);
c.init(stop,new AtomicBoolean());
c.run();
// And an Initiator pass, to mirror the full compaction pipeline.
Initiator i=new Initiator();
i.setThreadId((int)i.getId());
i.setHiveConf(hiveConf);
i.init(stop,new AtomicBoolean());
i.run();
// Data must be intact after cleaning — the core HIVE-12352 regression check.
Assert.assertEquals("",expected,runStatementOnDriver("select a,b from " + tblName + " order by a"));
}
EqualityVerifier
/**
 * Insert-overwrite fed by a self join must fully replace the target's prior contents;
 * for a partitioned table only the overwritten partition is replaced.
 * @throws Exception on driver failure
 */
@Test public void testInsertOverwriteWithSelfJoin() throws Exception {
int[][] part1Data={{1,7}};
runStatementOnDriver("insert into " + Table.NONACIDORCTBL + "(a,b) "+ makeValuesClause(part1Data));
// Overwrite the non-partitioned table with a constant row derived from a self join.
runStatementOnDriver("insert overwrite table " + Table.NONACIDORCTBL + " select 2, 9 from "+ Table.NONACIDORCTBL+ " T inner join "+ Table.NONACIDORCTBL+ " S on T.a=S.a");
List<String> rs=runStatementOnDriver("select a,b from " + Table.NONACIDORCTBL + " order by a,b");
int[][] joinData={{2,9}};
Assert.assertEquals("Self join non-part insert overwrite failed",stringifyValues(joinData),rs);
// Partitioned case: overwrite p=1 with the union of both partitions; p=2 stays as-is.
int[][] part2Data={{1,8}};
runStatementOnDriver("insert into " + Table.NONACIDPART + " partition(p=1) (a,b) "+ makeValuesClause(part1Data));
runStatementOnDriver("insert into " + Table.NONACIDPART + " partition(p=2) (a,b) "+ makeValuesClause(part2Data));
runStatementOnDriver("insert overwrite table " + Table.NONACIDPART + " partition(p=1) select a,b from "+ Table.NONACIDPART);
List<String> rs2=runStatementOnDriver("select a,b from " + Table.NONACIDPART + " order by a,b");
int[][] updatedData={{1,7},{1,8},{1,8}};
Assert.assertEquals("Insert overwrite partition failed",stringifyValues(updatedData),rs2);
}
EqualityVerifier
/**
 * Bulk DELETE and UPDATE on an ACID table driven by IN-subqueries over a non-ACID
 * source table.
 * @throws Exception on driver failure
 */
@Test public void testDeleteIn() throws Exception {
int[][] tableData={{1,2},{3,2},{5,2},{1,3},{3,3},{5,3}};
runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) "+ makeValuesClause(tableData));
runStatementOnDriver("insert into " + Table.NONACIDORCTBL + "(a,b) values(1,7),(3,7)");
// Delete rows whose key appears in the non-ACID table, then re-insert from it.
runStatementOnDriver("delete from " + Table.ACIDTBL + " where a in(select a from "+ Table.NONACIDORCTBL+ ")");
runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) select a,b from "+ Table.NONACIDORCTBL);
List<String> rs=runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
int[][] updatedData={{1,7},{3,7},{5,2},{5,3}};
Assert.assertEquals("Bulk update failed",stringifyValues(updatedData),rs);
// Update via an IN-subquery on the value column.
runStatementOnDriver("update " + Table.ACIDTBL + " set b=19 where b in(select b from "+ Table.NONACIDORCTBL+ " where a = 3)");
List<String> rs2=runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
int[][] updatedData2={{1,19},{3,19},{5,2},{5,3}};
Assert.assertEquals("Bulk update2 failed",stringifyValues(updatedData2),rs2);
}
Class: org.apache.hadoop.hive.ql.exec.TestFileSinkOperator EqualityVerifier
/**
 * UPDATE through FileSinkOperator with dynamic partitioning: rows carry both a record
 * id and a partition value; the published row-count delta must be "0".
 */
@Test public void testUpdateDynamicPartitioning() throws Exception {
setBasePath("updateDP");
setupData(DataFormat.WITH_RECORD_ID_AND_PARTITION_VALUE);
FileSinkOperator sink=getFileSink(AcidUtils.Operation.UPDATE,true,2);
processRows(sink);
// An update nets out to zero in the row-count stat.
Assert.assertEquals("0",TFSOStatsPublisher.stats.get(StatsSetupConst.ROW_COUNT));
confirmOutput(DataFormat.WITH_RECORD_ID_AND_PARTITION_VALUE);
}
EqualityVerifier
/**
 * UPDATE through FileSinkOperator without dynamic partitioning; the published
 * row-count delta must be "0".
 */
@Test public void testUpdate() throws Exception {
setBasePath("update");
setupData(DataFormat.WITH_RECORD_ID);
FileSinkOperator sink=getFileSink(AcidUtils.Operation.UPDATE,false,2);
processRows(sink);
// An update nets out to zero in the row-count stat.
Assert.assertEquals("0",TFSOStatsPublisher.stats.get(StatsSetupConst.ROW_COUNT));
confirmOutput(DataFormat.WITH_RECORD_ID);
}
EqualityVerifier
/**
 * INSERT through FileSinkOperator without dynamic partitioning; all ten fixture rows
 * must be counted in the published stats.
 */
@Test public void testInsert() throws Exception {
setBasePath("insert");
setupData(DataFormat.WITH_PARTITION_VALUE);
FileSinkOperator sink=getFileSink(AcidUtils.Operation.INSERT,false,1);
processRows(sink);
Assert.assertEquals("10",TFSOStatsPublisher.stats.get(StatsSetupConst.ROW_COUNT));
confirmOutput(DataFormat.WITH_PARTITION_VALUE);
}
EqualityVerifier
/**
 * DELETE through FileSinkOperator without dynamic partitioning; the published
 * row-count delta must be negative ("-10").
 */
@Test public void testDelete() throws Exception {
setBasePath("delete");
setupData(DataFormat.WITH_RECORD_ID);
FileSinkOperator sink=getFileSink(AcidUtils.Operation.DELETE,false,2);
processRows(sink);
Assert.assertEquals("-10",TFSOStatsPublisher.stats.get(StatsSetupConst.ROW_COUNT));
confirmOutput(DataFormat.WITH_RECORD_ID);
}
EqualityVerifier
/**
 * INSERT through FileSinkOperator with dynamic partitioning; the published per-stat
 * row count must be "5".
 */
@Test public void testInsertDynamicPartitioning() throws Exception {
setBasePath("insertDP");
setupData(DataFormat.WITH_PARTITION_VALUE);
FileSinkOperator sink=getFileSink(AcidUtils.Operation.INSERT,true,1);
processRows(sink);
Assert.assertEquals("5",TFSOStatsPublisher.stats.get(StatsSetupConst.ROW_COUNT));
confirmOutput(DataFormat.WITH_PARTITION_VALUE);
}
EqualityVerifier
/**
 * DELETE through FileSinkOperator with dynamic partitioning; the published per-stat
 * row-count delta must be "-5".
 */
@Test public void testDeleteDynamicPartitioning() throws Exception {
setBasePath("deleteDP");
setupData(DataFormat.WITH_RECORD_ID);
FileSinkOperator sink=getFileSink(AcidUtils.Operation.DELETE,true,2);
processRows(sink);
Assert.assertEquals("-5",TFSOStatsPublisher.stats.get(StatsSetupConst.ROW_COUNT));
confirmOutput(DataFormat.WITH_RECORD_ID);
}
Class: org.apache.hadoop.hive.ql.exec.TestOperators InternalCallVerifier EqualityVerifier
/**
 * Paths handed to FetchOperator.setFetchOperatorContext are serialized into a
 * tab-separated list; tabs and backslashes inside a path must be escaped so the list
 * splits back into the original number of entries.
 * @throws Exception on configuration failure
 */
@Test public void testFetchOperatorContextQuoting() throws Exception {
JobConf conf=new JobConf();
List<Path> paths=new ArrayList<>();
paths.add(new Path("hdfs://nn.example.com/fi\tl\\e\t1"));
paths.add(new Path("hdfs://nn.example.com/file\t2"));
paths.add(new Path("file:/file3"));
FetchOperator.setFetchOperatorContext(conf,paths);
String[] parts=conf.get(FetchOperator.FETCH_OPERATOR_DIRECTORY_LIST).split("\t");
assertEquals(3,parts.length);
// Embedded tabs become "\t" and backslashes become "\\" in the serialized form.
assertEquals("hdfs://nn.example.com/fi\\tl\\\\e\\t1",parts[0]);
assertEquals("hdfs://nn.example.com/file\\t2",parts[1]);
assertEquals("file:/file3",parts[2]);
}
InternalCallVerifier EqualityVerifier
/**
 * End-to-end check that a custom InputFormat receives the fetch-operator context:
 * creates a partitioned table backed by CustomInFmt, loads two partitions, and
 * verifies the full scan returns all 20 rows.
 * @throws Exception on driver failure
 */
@Test public void testFetchOperatorContext() throws Exception {
HiveConf conf=new HiveConf();
conf.set("hive.support.concurrency","false");
conf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE,"nonstrict");
SessionState.start(conf);
String cmd="create table fetchOp (id int, name string) " + "partitioned by (state string) " + "row format delimited fields terminated by '|' "+ "stored as "+ "inputformat 'org.apache.hadoop.hive.ql.exec.TestOperators$CustomInFmt' "+ "outputformat 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' "+ "tblproperties ('myprop1'='val1', 'myprop2' = 'val2')";
Driver driver=new Driver();
driver.init();
CommandProcessorResponse response=driver.run(cmd);
assertEquals(0,response.getResponseCode());
List<String> result=new ArrayList<>();
cmd="load data local inpath '../data/files/employee.dat' " + "overwrite into table fetchOp partition (state='CA')";
driver.init();
response=driver.run(cmd);
assertEquals(0,response.getResponseCode());
cmd="load data local inpath '../data/files/employee2.dat' " + "overwrite into table fetchOp partition (state='OR')";
driver.init();
response=driver.run(cmd);
assertEquals(0,response.getResponseCode());
cmd="select * from fetchOp";
driver.init();
driver.setMaxRows(500);
response=driver.run(cmd);
assertEquals(0,response.getResponseCode());
driver.getResults(result);
// Both partitions together hold 20 rows.
assertEquals(20,result.size());
driver.close();
}
Class: org.apache.hadoop.hive.ql.exec.errors.TestTaskLogProcessor InternalCallVerifier EqualityVerifier
/**
 * Three task logs, each containing one throwable's stack trace (with assorted
 * surrounding noise), must yield exactly three extracted stack traces in file order.
 * @throws Exception on log-file I/O failure
 */
@Test public void testGetStackTraces() throws Exception {
JobConf jobConf=new JobConf();
jobConf.set(HiveConf.ConfVars.HIVEQUERYSTRING.varname,"select * from foo group by moo;");
final TaskLogProcessor taskLogProcessor=new TaskLogProcessor(jobConf);
Throwable oome=new OutOfMemoryError("java heap space");
String oomeStr=writeThrowableAsFile("Some line in the beginning\n",oome,null,"1",taskLogProcessor);
// Nested-cause chain: InvocationTargetException -> IOException -> NullPointerException.
Throwable compositeException=new InvocationTargetException(new IOException(new NullPointerException()));
String compositeStr=writeThrowableAsFile(null,compositeException,"Some line in the end.\n","2",taskLogProcessor);
Throwable eofe=new EOFException();
String eofeStr=writeThrowableAsFile("line a\nlineb\n",eofe," line c\nlineD\n","3",taskLogProcessor);
List<List<String>> stackTraces=taskLogProcessor.getStackTraces();
assertEquals(3,stackTraces.size());
checkException(oomeStr,stackTraces.get(0));
checkException(compositeStr,stackTraces.get(1));
checkException(eofeStr,stackTraces.get(2));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * A log containing "split: &lt;file&gt;" markers plus an EOFException must trigger the
 * data-corruption heuristic, producing exactly one error that names one of the
 * suspect files and carries a non-empty suggested solution.
 * @throws Exception on log-file I/O failure
 */
@Test public void testDataCorruptErrorHeuristic() throws Exception {
JobConf jobConf=new JobConf();
jobConf.set(HiveConf.ConfVars.HIVEQUERYSTRING.varname,"select * from foo group by moo;");
final TaskLogProcessor taskLogProcessor=new TaskLogProcessor(jobConf);
String badFile1="hdfs://localhost/foo1/moo1/zoo1";
String badFile2="hdfs://localhost/foo2/moo2/zoo2";
String content="line a\nlineb\n" + "split: " + badFile1 + " is very bad.\n"+ " line c\nlineD\n"+ "split: "+ badFile2+ " is also very bad.\n"+ " java.io.EOFException: null \n"+ "line E\n";
File log3File=writeTestLog("1",content);
taskLogProcessor.addTaskAttemptLogUrl(log3File.toURI().toURL().toString());
List<ErrorAndSolution> errList=taskLogProcessor.getErrors();
assertEquals(1,errList.size());
final ErrorAndSolution eas=errList.get(0);
String error=eas.getError();
assertNotNull(error);
// Either corrupt file may be the one reported.
assertTrue(error.contains(badFile1) || error.contains(badFile2));
String solution=eas.getSolution();
assertNotNull(solution);
assertTrue(solution.length() > 0);
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * An OutOfMemoryError in the task log must trigger the map-aggregation memory
 * heuristic: one error mentioning "memory" with a solution that names the
 * hive.map.aggr hash-memory configuration variable.
 * @throws Exception on log-file I/O failure
 */
@Test public void testMapAggrMemErrorHeuristic() throws Exception {
JobConf jobConf=new JobConf();
jobConf.set(HiveConf.ConfVars.HIVEQUERYSTRING.varname,"select * from foo group by moo;");
final TaskLogProcessor taskLogProcessor=new TaskLogProcessor(jobConf);
Throwable oome=new OutOfMemoryError("java heap space");
File log1File=writeTestLog("1",toString(oome));
taskLogProcessor.addTaskAttemptLogUrl(log1File.toURI().toURL().toString());
List<ErrorAndSolution> errList=taskLogProcessor.getErrors();
assertEquals(1,errList.size());
final ErrorAndSolution eas=errList.get(0);
String error=eas.getError();
assertNotNull(error);
assertTrue(error.contains("memory"));
String solution=eas.getSolution();
assertNotNull(solution);
assertTrue(solution.length() > 0);
// The suggested fix must point at the relevant config knob.
String confName=HiveConf.ConfVars.HIVEMAPAGGRHASHMEMORY.toString();
assertTrue(solution.contains(confName));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * A "Script failed with code N" line in the task log must trigger the script-error
 * heuristic: one error containing the exit code, with a non-empty solution.
 * @throws Exception on log-file I/O failure
 */
@Test public void testScriptErrorHeuristic() throws Exception {
JobConf jobConf=new JobConf();
jobConf.set(HiveConf.ConfVars.HIVEQUERYSTRING.varname,"select * from foo group by moo;");
final TaskLogProcessor taskLogProcessor=new TaskLogProcessor(jobConf);
String errorCode="7874";
String content="line a\nlineb\n" + "Script failed with code " + errorCode + " line c\nlineD\n";
File log3File=writeTestLog("1",content);
taskLogProcessor.addTaskAttemptLogUrl(log3File.toURI().toURL().toString());
List<ErrorAndSolution> errList=taskLogProcessor.getErrors();
assertEquals(1,errList.size());
final ErrorAndSolution eas=errList.get(0);
String error=eas.getError();
assertNotNull(error);
// contains() reads better than indexOf(...) >= 0 and behaves identically here.
assertTrue(error.contains(errorCode));
String solution=eas.getSolution();
assertNotNull(solution);
assertTrue(solution.length() > 0);
}
Class: org.apache.hadoop.hive.ql.exec.persistence.TestBytesBytesMultiHashMap InternalCallVerifier EqualityVerifier
/**
 * Fills the hash map to exactly its capacity (load factor 1.0, so no expansion) and
 * verifies every key remains retrievable; finally probes for a missing key, which
 * must terminate on a completely full table.
 */
@Test public void testPutWithFullMap() throws Exception {
BytesBytesMultiHashMap fullMap=new BytesBytesMultiHashMap(CAPACITY,1f,WB_SIZE);
UniqueKeysKvSource source=new UniqueKeysKvSource();
for (int n=0; n < CAPACITY; ++n) {
fullMap.put(source,-1);
}
// Every inserted key/value pair must still resolve after the map is full.
for (int n=0; n < source.keys.size(); ++n) {
verifyHashMapResult(fullMap,source.keys.get(n),source.values.get(n));
}
assertEquals(CAPACITY,fullMap.getCapacity());
// Lookup of an absent (empty) key on a full map must not loop forever.
BytesBytesMultiHashMap.Result probe=new BytesBytesMultiHashMap.Result();
fullMap.getValueResult(new byte[0],0,0,probe);
}
EqualityVerifier
/**
 * Requested capacities must be normalized to powers of two, and the four-argument
 * constructor must accept its parameters.
 */
@Test public void testCapacityValidation(){
// An exact power of two is kept as requested.
BytesBytesMultiHashMap m=new BytesBytesMultiHashMap(CAPACITY,LOAD_FACTOR,WB_SIZE);
assertEquals(CAPACITY,m.getCapacity());
// A non-power-of-two request is rounded up to the next power of two.
m=new BytesBytesMultiHashMap(9,LOAD_FACTOR,WB_SIZE);
assertEquals(16,m.getCapacity());
// Smoke-test the four-argument constructor; it only needs to not throw here.
BytesBytesMultiHashMap fourArg=new BytesBytesMultiHashMap(1024,(float)0.75,524288,1);
}
InternalCallVerifier EqualityVerifier
/**
 * Starts from capacity 1 with a near-zero load factor so every put forces a rehash,
 * and checks after each insertion that all earlier keys still resolve; 18 puts must
 * end at capacity 2^18.
 */
@Test public void testExpand() throws Exception {
BytesBytesMultiHashMap growingMap=new BytesBytesMultiHashMap(1,0.0000001f,WB_SIZE);
UniqueKeysKvSource source=new UniqueKeysKvSource();
for (int round=0; round < 18; ++round) {
growingMap.put(source,-1);
// After each expansion, every previously inserted key must survive the rehash.
for (int k=0; k <= round; ++k) {
verifyHashMapResult(growingMap,source.keys.get(k),source.values.get(k));
}
}
assertEquals(1 << 18,growingMap.getCapacity());
}
Class: org.apache.hadoop.hive.ql.exec.persistence.TestMapJoinEqualityTableContainer BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Basic put/get/clear semantics of the map-join table container: contents copied out
 * via entrySet() must match what get() returns per key, and clear() must empty it.
 * @throws Exception on container failure
 */
@Test public void testContainerBasics() throws Exception {
container.put(KEY1,rowContainer);
container.put(KEY2,rowContainer);
container.put(KEY3,rowContainer);
container.put(KEY4,rowContainer);
Assert.assertEquals(4,container.size());
// Generified shadow copy (container's own entry type is declared elsewhere).
Map<Object,Object> localContainer=new HashMap<>();
for ( Entry entry : container.entrySet()) {
localContainer.put(entry.getKey(),entry.getValue());
}
Utilities.testEquality(container.get(KEY1),localContainer.get(KEY1));
Utilities.testEquality(container.get(KEY2),localContainer.get(KEY2));
Utilities.testEquality(container.get(KEY3),localContainer.get(KEY3));
Utilities.testEquality(container.get(KEY4),localContainer.get(KEY4));
container.clear();
Assert.assertEquals(0,container.size());
Assert.assertTrue(container.entrySet().isEmpty());
}
Class: org.apache.hadoop.hive.ql.exec.persistence.TestMapJoinRowContainer APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Round-trips a MapJoinEagerRowContainer through Utilities.serde and verifies the
 * deserialized copy matches the original, the row count is preserved, and the alias
 * filter reflects the per-row filter tags.
 * @throws Exception on serialization failure
 */
@Test public void testSerialization() throws Exception {
MapJoinRowContainer container1=new MapJoinEagerRowContainer();
// Rows mix Object[] and List forms and include nulls in the value columns; the
// trailing short is the "filter" column.
container1.addRow(new Object[]{new Text("f0"),null,new ShortWritable((short)0xf)});
container1.addRow(Arrays.asList(new Object[]{null,new Text("f1"),new ShortWritable((short)0xf)}));
container1.addRow(new Object[]{null,null,new ShortWritable((short)0xf)});
container1.addRow(Arrays.asList(new Object[]{new Text("f0"),new Text("f1"),new ShortWritable((short)0x1)}));
// Serialize with the given column names/types and read back into a fresh container.
MapJoinRowContainer container2=Utilities.serde(container1,"f0,f1,filter","string,string,smallint");
Utilities.testEquality(container1,container2);
Assert.assertEquals(4,container1.rowCount());
// 0xf, 0xf, 0xf, 0x1 yield an alias filter of 1 — presumably the AND of the filter
// tags; verify against MapJoinRowContainer's implementation.
Assert.assertEquals(1,container2.getAliasFilter());
}
Class: org.apache.hadoop.hive.ql.exec.persistence.TestMapJoinTableContainer APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Persists the "dummy" (empty) table marker and reloads it; the loaded container must
 * be empty. Relies on fixture fields set up elsewhere in the class: out/baos (stream
 * pair), in, containerSerde, container.
 * @throws Exception on serialization failure
 */
@Test public void testDummyContainer() throws Exception {
MapJoinTableContainerSerDe.persistDummyTable(out);
out.close();
// Re-read exactly the bytes that persistDummyTable wrote.
in=new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray()));
container=containerSerde.load(in);
Assert.assertEquals(0,container.size());
Assert.assertTrue(container.entrySet().isEmpty());
}
Class: org.apache.hadoop.hive.ql.exec.tez.InputSplitComparatorTest EqualityVerifier
/**
 * Two splits over the same file with equal lengths: the split with the larger start
 * offset must compare greater.
 */
@Test public void testCompare1() throws Exception {
FileSplit laterSplit=new FileSplit(new Path("/abc/def"),2000L,500L,EMPTY);
FileSplit earlierSplit=new FileSplit(new Path("/abc/def"),1000L,500L,EMPTY);
InputSplitComparator cmp=new InputSplitComparator();
assertEquals(1,cmp.compare(laterSplit,earlierSplit));
}
Class: org.apache.hadoop.hive.ql.exec.tez.TestHostAffinitySplitLocationProvider InternalCallVerifier EqualityVerifier
/**
 * Non-file splits get no affinity remapping: the provider must return the split's own
 * locations unchanged.
 */
@Test(timeout=5000) public void testNonFileSplits() throws IOException {
HostAffinitySplitLocationProvider provider=new HostAffinitySplitLocationProvider(executorLocations);
InputSplit firstSplit=createMockInputSplit(new String[]{locations[0],locations[1]});
InputSplit secondSplit=createMockInputSplit(new String[]{locations[2],locations[3]});
assertArrayEquals(new String[]{locations[0],locations[1]},provider.getLocations(firstSplit));
assertArrayEquals(new String[]{locations[2],locations[3]},provider.getLocations(secondSplit));
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * ORC split affinity: splits on the same path with the same start offset must map to
 * the same single executor host; a different start maps elsewhere; and repeated
 * lookups are deterministic.
 */
@Test(timeout=5000) public void testOrcSplitsLocationAffinity() throws IOException {
HostAffinitySplitLocationProvider provider=new HostAffinitySplitLocationProvider(executorLocations);
// Three ORC splits over the same path: two share a start offset, the third does not.
InputSplit sharedStartA=createMockFileSplit(true,"path1",0,15000,new String[]{locations[0],locations[1]});
InputSplit sharedStartB=createMockFileSplit(true,"path1",0,30000,new String[]{locations[0],locations[1]});
InputSplit otherStart=createMockFileSplit(true,"path1",15000,30000,new String[]{locations[0],locations[1]});
String[] locA=provider.getLocations(sharedStartA);
String[] locB=provider.getLocations(sharedStartB);
String[] locC=provider.getLocations(otherStart);
// Each split must resolve to exactly one executor host, never a raw data-node host.
for (String[] resolved : new String[][]{locA,locB,locC}) {
assertEquals(1,resolved.length);
assertFalse(locationsSet.contains(resolved[0]));
assertTrue(executorLocationsSet.contains(resolved[0]));
}
// Same path + same start offset hash alike; a different start maps to another host.
assertEquals(locA[0],locB[0]);
assertNotEquals(locA[0],locC[0]);
// Affinity is deterministic: a second lookup returns identical answers.
assertArrayEquals(locA,provider.getLocations(sharedStartA));
assertArrayEquals(locB,provider.getLocations(sharedStartB));
assertArrayEquals(locC,provider.getLocations(otherStart));
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Every ORC file split must be remapped to exactly one executor host rather than one
 * of the split's own data-node locations.
 */
@Test(timeout=5000) public void testOrcSplitsBasic() throws IOException {
HostAffinitySplitLocationProvider provider=new HostAffinitySplitLocationProvider(executorLocations);
InputSplit orcSplit1=createMockFileSplit(true,"path1",0,1000,new String[]{locations[0],locations[1]});
InputSplit orcSplit2=createMockFileSplit(true,"path2",0,2000,new String[]{locations[2],locations[3]});
InputSplit orcSplit3=createMockFileSplit(true,"path3",1000,2000,new String[]{locations[0],locations[3]});
// Single executor host per split; never a pass-through of the data-node locations.
for (InputSplit split : new InputSplit[]{orcSplit1,orcSplit2,orcSplit3}) {
String[] resolved=provider.getLocations(split);
assertEquals(1,resolved.length);
assertFalse(locationsSet.contains(resolved[0]));
assertTrue(executorLocationsSet.contains(resolved[0]));
}
}
Class: org.apache.hadoop.hive.ql.exec.tez.TestTezSessionPool IterativeVerifier UtilityVerifier EqualityVerifier HybridVerifier
/**
 * Twelve sessions drawn from a pool configured with three queues and four sessions per
 * queue must be spread exactly evenly (4/4/4) across the queues.
 */
@Test public void testSessionPoolThreads(){
try {
conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS,false);
conf.setVar(ConfVars.HIVE_SERVER2_TEZ_DEFAULT_QUEUES,"0,1,2");
conf.setIntVar(ConfVars.HIVE_SERVER2_TEZ_SESSIONS_PER_DEFAULT_QUEUE,4);
conf.setIntVar(ConfVars.HIVE_SERVER2_TEZ_SESSION_MAX_INIT_THREADS,16);
poolManager=new TestTezSessionPoolManager();
poolManager.setupPool(conf);
poolManager.startPool();
// Queue names are the digits "0".."2", so the queue name doubles as a counter index.
TezSessionState[] sessions=new TezSessionState[12];
int[] queueCounts=new int[3];
for (int i=0; i < sessions.length; ++i) {
sessions[i]=poolManager.getSession(null,conf,true,false);
queueCounts[Integer.parseInt(sessions[i].getQueueName())]+=1;
}
for (int count : queueCounts) {
assertEquals(4,count);
}
for (TezSessionState session : sessions) {
poolManager.returnSession(session,false);
}
}
catch ( Exception e) {
e.printStackTrace();
// Surface the cause in the failure report instead of a bare fail().
fail(e.getMessage());
}
}
BranchVerifier UtilityVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * With a single init thread, sessions must come out of the pool in round-robin queue
 * order: a, b, c, then a again after wrap-around.
 */
@Test public void testSessionPoolGetInOrder(){
try {
conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS,false);
conf.setVar(ConfVars.HIVE_SERVER2_TEZ_DEFAULT_QUEUES,"a,b,c");
conf.setIntVar(ConfVars.HIVE_SERVER2_TEZ_SESSIONS_PER_DEFAULT_QUEUE,2);
conf.setIntVar(ConfVars.HIVE_SERVER2_TEZ_SESSION_MAX_INIT_THREADS,1);
poolManager=new TestTezSessionPoolManager();
poolManager.setupPool(conf);
poolManager.startPool();
TezSessionState sessionState=poolManager.getSession(null,conf,true,false);
assertEquals("a",sessionState.getQueueName());
poolManager.returnSession(sessionState,false);
sessionState=poolManager.getSession(null,conf,true,false);
assertEquals("b",sessionState.getQueueName());
poolManager.returnSession(sessionState,false);
sessionState=poolManager.getSession(null,conf,true,false);
assertEquals("c",sessionState.getQueueName());
poolManager.returnSession(sessionState,false);
// Fourth acquisition wraps around to the first queue; assertEquals gives a clear
// message, unlike the previous bare if/fail() check.
sessionState=poolManager.getSession(null,conf,true,false);
assertEquals("a",sessionState.getQueueName());
poolManager.returnSession(sessionState,false);
}
catch ( Exception e) {
e.printStackTrace();
// Surface the cause in the failure report instead of a bare fail().
fail(e.getMessage());
}
}
Class: org.apache.hadoop.hive.ql.exec.tez.TestTezTask InternalCallVerifier EqualityVerifier
/**
 * Building a DAG from an empty TezWork must produce a DAG with no vertices.
 * @throws Exception if DAG construction fails
 */
@Test public void testEmptyWork() throws IllegalArgumentException, IOException, Exception {
DAG dag=task.build(conf,new TezWork("",null),path,appLr,null,new Context(conf));
// JUnit convention: expected value first, actual second (was reversed).
assertEquals(0,dag.getVertices().size());
}
InternalCallVerifier EqualityVerifier
/**
 * Extra local resources for supplied jars must be returned keyed by base name;
 * localization and base-name resolution are stubbed via mocks.
 * @throws Exception on task failure
 */
@Test public void testGetExtraLocalResources() throws Exception {
final String[] inputOutputJars=new String[]{"file:///tmp/foo.jar"};
LocalResource res=mock(LocalResource.class);
final List<LocalResource> resources=Collections.singletonList(res);
final Map<String,LocalResource> resMap=new HashMap<>();
resMap.put("foo.jar",res);
when(utils.localizeTempFiles(path.toString(),conf,inputOutputJars)).thenReturn(resources);
when(utils.getBaseName(res)).thenReturn("foo.jar");
assertEquals(resMap,task.getExtraLocalResources(conf,path,inputOutputJars));
}
Class: org.apache.hadoop.hive.ql.exec.vector.TestListColumnVector InternalCallVerifier EqualityVerifier
/**
 * Exercises ListColumnVector.setElement: copying a null entry, copying list slices
 * (growing the child vector when it overflows), reset(), and copying from a
 * repeating input vector.
 * @throws Exception on vector failure
 */
@Test public void testSet() throws Exception {
LongColumnVector input1=new LongColumnVector(10);
ListColumnVector input=new ListColumnVector(10,input1);
input.init();
LongColumnVector output1=new LongColumnVector(30);
ListColumnVector output=new ListColumnVector(10,output1);
output.init();
input.noNulls=false;
input.isNull[6]=true;
input.childCount=11;
Arrays.fill(output1.vector,-1);
// input: element i is the 2-item list [10*i, 10*i + 10]; output offsets pre-shifted.
for (int i=0; i < 10; ++i) {
input1.vector[i]=10 * i;
input.offsets[i]=i;
input.lengths[i]=2;
output.offsets[i]=i + 2;
output.lengths[i]=3;
}
output.childCount=30;
// Copying the null input element (index 6) makes the output element null.
output.setElement(3,6,input);
assertEquals(30,output.childCount);
StringBuilder buf=new StringBuilder();
output.stringifyValue(buf,3);
assertEquals("null",buf.toString());
// Copying element 5 ([50, 60]) appends its children to the output's child vector.
output.setElement(3,5,input);
assertEquals(30,output.offsets[3]);
assertEquals(2,output.lengths[3]);
assertEquals(32,output.childCount);
buf=new StringBuilder();
output.stringifyValue(buf,3);
assertEquals("[50, 60]",buf.toString());
// Copying element 4 ([40, 50]) overflows the 30-slot child vector, forcing growth.
output.setElement(3,4,input);
assertEquals(34,output.childCount);
assertEquals(34,output1.vector.length);
// Previously copied children survive the resize.
assertEquals(50,output1.vector[30]);
assertEquals(60,output1.vector[31]);
buf=new StringBuilder();
output.stringifyValue(buf,3);
assertEquals("[40, 50]",buf.toString());
// reset() clears null/repeating bookkeeping on the vector and its child.
input.reset();
assertEquals(false,input1.isRepeating);
assertEquals(true,input.noNulls);
output.reset();
assertEquals(0,output.childCount);
// With a repeating input, any source index copies the single repeated list.
input.isRepeating=true;
input.offsets[0]=0;
input.lengths[0]=10;
output.setElement(2,7,input);
assertEquals(10,output.childCount);
buf=new StringBuilder();
output.stringifyValue(buf,2);
assertEquals("[0, 10, 20, 30, 40, 50, 60, 70, 80, 90]",buf.toString());
}
IterativeVerifier BranchVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Exercises ListColumnVector.flatten/unFlatten: expanding a repeating null entry,
 * honoring a selection vector, expanding a repeating non-null list, and reset().
 * @throws Exception on vector failure
 */
@Test public void testFlatten() throws Exception {
LongColumnVector col1=new LongColumnVector(10);
ListColumnVector vector=new ListColumnVector(10,col1);
vector.init();
col1.isRepeating=true;
vector.isRepeating=true;
vector.noNulls=false;
vector.isNull[0]=true;
vector.childCount=0;
for (int i=0; i < 10; ++i) {
col1.vector[i]=i + 3;
vector.offsets[i]=i;
vector.lengths[i]=10 + i;
}
// Flattening a repeating null entry marks every row null; the child is untouched.
vector.flatten(false,null,10);
assertFalse(vector.isRepeating);
assertFalse(vector.noNulls);
assertTrue(col1.isRepeating);
assertTrue(col1.noNulls);
for (int i=0; i < 10; ++i) {
assertTrue("isNull at " + i,vector.isNull[i]);
}
for (int i=0; i < 10; ++i) {
StringBuilder buf=new StringBuilder();
vector.stringifyValue(buf,i);
assertEquals("null",buf.toString());
}
// unFlatten restores the repeating flags saved by flatten.
vector.unFlatten();
assertTrue(col1.isRepeating);
assertTrue(vector.isRepeating);
// With a selection vector, only the selected rows (3, 5, 7) become null.
Arrays.fill(vector.isNull,1,10,false);
int[] sel=new int[]{3,5,7};
vector.flatten(true,sel,3);
for (int i=1; i < 10; i++) {
assertEquals("failure at " + i,i == 3 || i == 5 || i == 7,vector.isNull[i]);
}
vector.unFlatten();
// Now flatten a repeating non-null list: offsets[0]=0, length 3, child value 3.
vector.noNulls=true;
vector.isRepeating=true;
vector.offsets[0]=0;
vector.lengths[0]=3;
vector.childCount=3;
vector.flatten(false,null,10);
assertFalse(vector.isRepeating);
assertFalse(vector.noNulls);
assertFalse(col1.isRepeating);
assertFalse(col1.noNulls);
for (int i=0; i < 10; ++i) {
assertEquals("offset at " + i,0,vector.offsets[i]);
assertEquals("length at " + i,3,vector.lengths[i]);
}
for (int i=0; i < 10; ++i) {
StringBuilder buf=new StringBuilder();
vector.stringifyValue(buf,i);
assertEquals("[3, 3, 3]",buf.toString());
}
vector.unFlatten();
assertTrue(col1.isRepeating);
assertTrue(col1.noNulls);
assertTrue(vector.isRepeating);
assertTrue(vector.noNulls);
// Selected flatten only rewrites offsets/lengths for rows 3, 5, 7; others keep -1.
Arrays.fill(vector.offsets,1,10,-1);
Arrays.fill(vector.lengths,1,10,-1);
Arrays.fill(col1.vector,1,10,-1);
vector.flatten(true,sel,3);
for (int i=1; i < 10; i++) {
if (i == 3 || i == 5 || i == 7) {
assertEquals("failure at " + i,0,vector.offsets[i]);
assertEquals("failure at " + i,3,vector.lengths[i]);
}
else {
assertEquals("failure at " + i,-1,vector.offsets[i]);
assertEquals("failure at " + i,-1,vector.lengths[i]);
}
}
// The repeating child value 3 was materialized into the first 3 child slots only.
for (int i=0; i < 3; ++i) {
assertEquals("failure at " + i,3,col1.vector[i]);
}
for (int i=3; i < 10; ++i) {
assertEquals("failure at " + i,-1,col1.vector[i]);
}
vector.unFlatten();
// reset() clears repeating/null bookkeeping and the child count.
vector.reset();
assertFalse(col1.isRepeating);
assertTrue(col1.noNulls);
assertFalse(vector.isRepeating);
assertTrue(vector.noNulls);
assertEquals(0,vector.childCount);
}
Class: org.apache.hadoop.hive.ql.exec.vector.TestMapColumnVector InternalCallVerifier EqualityVerifier
/**
 * Exercises MapColumnVector.setElement: copying a null entry, copying key/value
 * slices (growing the child vectors when they overflow), reset(), and copying from a
 * repeating input vector.
 * @throws Exception on vector failure
 */
@Test public void testSet() throws Exception {
LongColumnVector input1=new LongColumnVector(10);
DoubleColumnVector input2=new DoubleColumnVector(10);
MapColumnVector input=new MapColumnVector(10,input1,input2);
input.init();
LongColumnVector output1=new LongColumnVector(30);
DoubleColumnVector output2=new DoubleColumnVector(30);
MapColumnVector output=new MapColumnVector(10,output1,output2);
output.init();
input.noNulls=false;
input.isNull[6]=true;
input.childCount=11;
Arrays.fill(output1.vector,-1);
// input: element i is a 2-entry map with keys 10*i, 10*i+10 and values 100x the keys.
for (int i=0; i < 10; ++i) {
input1.vector[i]=10 * i;
input2.vector[i]=100 * i;
input.offsets[i]=i;
input.lengths[i]=2;
output.offsets[i]=i + 2;
output.lengths[i]=3;
}
output.childCount=30;
// Copying the null input element (index 6) makes the output element null.
output.setElement(3,6,input);
assertEquals(30,output.childCount);
StringBuilder buf=new StringBuilder();
output.stringifyValue(buf,3);
assertEquals("null",buf.toString());
// Copying element 5 appends its key/value children to the output's child vectors.
output.setElement(3,5,input);
assertEquals(30,output.offsets[3]);
assertEquals(2,output.lengths[3]);
assertEquals(32,output.childCount);
buf=new StringBuilder();
output.stringifyValue(buf,3);
assertEquals("[{\"key\": 50, \"value\": 500.0}," + " {\"key\": 60, \"value\": 600.0}]",buf.toString());
// Copying element 4 overflows the 30-slot child vectors, forcing them to grow.
output.setElement(3,4,input);
assertEquals(34,output.childCount);
assertEquals(34,output1.vector.length);
// Previously copied children survive the resize.
assertEquals(50,output1.vector[30]);
assertEquals(60,output1.vector[31]);
buf=new StringBuilder();
output.stringifyValue(buf,3);
assertEquals("[{\"key\": 40, \"value\": 400.0}," + " {\"key\": 50, \"value\": 500.0}]",buf.toString());
// reset() clears null/repeating bookkeeping on the vector and its children.
input.reset();
assertEquals(false,input1.isRepeating);
assertEquals(true,input.noNulls);
output.reset();
assertEquals(0,output.childCount);
// With a repeating input, any source index copies the single repeated 10-entry map.
input.isRepeating=true;
input.offsets[0]=0;
input.lengths[0]=10;
output.setElement(2,7,input);
assertEquals(10,output.childCount);
buf=new StringBuilder();
output.stringifyValue(buf,2);
assertEquals("[{\"key\": 0, \"value\": 0.0}," + " {\"key\": 10, \"value\": 100.0}," + " {\"key\": 20, \"value\": 200.0},"+ " {\"key\": 30, \"value\": 300.0},"+ " {\"key\": 40, \"value\": 400.0},"+ " {\"key\": 50, \"value\": 500.0},"+ " {\"key\": 60, \"value\": 600.0},"+ " {\"key\": 70, \"value\": 700.0},"+ " {\"key\": 80, \"value\": 800.0},"+ " {\"key\": 90, \"value\": 900.0}]",buf.toString());
}
IterativeVerifier BranchVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Exercises MapColumnVector.flatten/unFlatten: expanding a repeating null entry,
 * honoring a selection vector, expanding a repeating non-null map, and reset().
 * @throws Exception on vector failure
 */
@Test public void testFlatten() throws Exception {
LongColumnVector col1=new LongColumnVector(10);
DoubleColumnVector col2=new DoubleColumnVector(10);
MapColumnVector vector=new MapColumnVector(10,col1,col2);
vector.init();
col1.isRepeating=true;
vector.isRepeating=true;
vector.noNulls=false;
vector.isNull[0]=true;
vector.childCount=0;
for (int i=0; i < 10; ++i) {
col1.vector[i]=i + 3;
col2.vector[i]=i * 10;
vector.offsets[i]=i;
vector.lengths[i]=10 + i;
}
// Flattening a repeating null entry marks every row null; the children are untouched.
vector.flatten(false,null,10);
assertFalse(vector.isRepeating);
assertFalse(vector.noNulls);
assertTrue(col1.isRepeating);
assertTrue(col1.noNulls);
for (int i=0; i < 10; ++i) {
assertTrue("isNull at " + i,vector.isNull[i]);
}
for (int i=0; i < 10; ++i) {
StringBuilder buf=new StringBuilder();
vector.stringifyValue(buf,i);
assertEquals("null",buf.toString());
}
// unFlatten restores the repeating flags saved by flatten.
vector.unFlatten();
assertTrue(col1.isRepeating);
assertTrue(vector.isRepeating);
// With a selection vector, only the selected rows (3, 5, 7) become null.
Arrays.fill(vector.isNull,1,10,false);
int[] sel=new int[]{3,5,7};
vector.flatten(true,sel,3);
for (int i=1; i < 10; i++) {
assertEquals("failure at " + i,i == 3 || i == 5 || i == 7,vector.isNull[i]);
}
vector.unFlatten();
// Now flatten a repeating non-null 3-entry map (offsets[0]=0, length 3).
vector.noNulls=true;
vector.isRepeating=true;
vector.offsets[0]=0;
vector.lengths[0]=3;
vector.childCount=3;
vector.flatten(false,null,10);
assertFalse(vector.isRepeating);
assertFalse(vector.noNulls);
assertFalse(col1.isRepeating);
assertFalse(col1.noNulls);
assertFalse(col2.isRepeating);
assertFalse(col2.noNulls);
for (int i=0; i < 10; ++i) {
assertEquals("offset at " + i,0,vector.offsets[i]);
assertEquals("length at " + i,3,vector.lengths[i]);
}
// Key column repeats 3; value column keeps its per-slot values 0.0, 10.0, 20.0.
for (int i=0; i < 10; ++i) {
StringBuilder buf=new StringBuilder();
vector.stringifyValue(buf,i);
assertEquals("[{\"key\": 3, \"value\": 0.0}," + " {\"key\": 3, \"value\": 10.0}," + " {\"key\": 3, \"value\": 20.0}]",buf.toString());
}
vector.unFlatten();
assertTrue(col1.isRepeating);
assertTrue(col1.noNulls);
assertTrue(vector.isRepeating);
// col2 was never repeating, so unFlatten leaves it non-repeating.
assertFalse(col2.isRepeating);
assertTrue(col2.noNulls);
assertTrue(vector.noNulls);
// Selected flatten only rewrites offsets/lengths for rows 3, 5, 7; others keep -1.
Arrays.fill(vector.offsets,1,10,-1);
Arrays.fill(vector.lengths,1,10,-1);
Arrays.fill(col1.vector,1,10,-1);
vector.flatten(true,sel,3);
for (int i=1; i < 10; i++) {
if (i == 3 || i == 5 || i == 7) {
assertEquals("failure at " + i,0,vector.offsets[i]);
assertEquals("failure at " + i,3,vector.lengths[i]);
}
else {
assertEquals("failure at " + i,-1,vector.offsets[i]);
assertEquals("failure at " + i,-1,vector.lengths[i]);
}
}
// The repeating key value 3 was materialized into the first 3 child slots only.
for (int i=0; i < 3; ++i) {
assertEquals("failure at " + i,3,col1.vector[i]);
}
for (int i=3; i < 10; ++i) {
assertEquals("failure at " + i,-1,col1.vector[i]);
}
vector.unFlatten();
// reset() clears repeating/null bookkeeping and the child count.
vector.reset();
assertFalse(col1.isRepeating);
assertTrue(col1.noNulls);
assertFalse(col2.isRepeating);
assertTrue(col2.noNulls);
assertFalse(vector.isRepeating);
assertTrue(vector.noNulls);
assertEquals(0,vector.childCount);
}
Class: org.apache.hadoop.hive.ql.exec.vector.TestStructColumnVector InternalCallVerifier EqualityVerifier
/**
 * Exercises StructColumnVector.setElement: copying a null source row must
 * null the destination row; non-null rows copy each field, honoring the
 * repeating first field and per-field null flags. Also checks that reset()
 * clears the child repeating flag and the struct's null state.
 */
@Test public void testSet() throws Exception {
  LongColumnVector srcCol1 = new LongColumnVector(10);
  LongColumnVector srcCol2 = new LongColumnVector(10);
  StructColumnVector src = new StructColumnVector(10, srcCol1, srcCol2);
  src.init();
  LongColumnVector dstCol1 = new LongColumnVector(10);
  LongColumnVector dstCol2 = new LongColumnVector(10);
  StructColumnVector dst = new StructColumnVector(10, dstCol1, dstCol2);
  dst.init();
  // Field 1 repeats its row-0 value; field 2 is null at row 5; the struct
  // itself is null at row 6.
  srcCol1.isRepeating = true;
  srcCol2.noNulls = false;
  srcCol2.isNull[5] = true;
  src.noNulls = false;
  src.isNull[6] = true;
  int row = 0;
  while (row < 10) {
    srcCol1.vector[row] = row + 1;
    srcCol2.vector[row] = row + 2;
    ++row;
  }
  // Copying the null struct row 6 must yield a null destination row.
  dst.setElement(3, 6, src);
  StringBuilder sb = new StringBuilder();
  dst.stringifyValue(sb, 3);
  assertEquals("null", sb.toString());
  // Row 5: field 1 repeats row-0's value 1; field 2 is null there.
  dst.setElement(3, 5, src);
  sb = new StringBuilder();
  dst.stringifyValue(sb, 3);
  assertEquals("[1, null]", sb.toString());
  // Row 4: field 1 repeats value 1; field 2 holds 4 + 2 = 6.
  dst.setElement(3, 4, src);
  sb = new StringBuilder();
  dst.stringifyValue(sb, 3);
  assertEquals("[1, 6]", sb.toString());
  // reset() must clear the child's repeating flag and the struct's nulls.
  src.reset();
  assertEquals(false, srcCol1.isRepeating);
  assertEquals(true, src.noNulls);
}
IterativeVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Flattening a struct whose first field is repeating must materialize the
 * repeated row-0 value into every row and clear the repeating flag;
 * unFlatten must restore the flag, and stringification must then read
 * through it again.
 */
@Test public void testFlatten() throws Exception {
  LongColumnVector field1 = new LongColumnVector(10);
  LongColumnVector field2 = new LongColumnVector(10);
  StructColumnVector struct = new StructColumnVector(10, field1, field2);
  struct.init();
  field1.isRepeating = true;
  for (int row = 0; row != 10; ++row) {
    field1.vector[row] = row;
    field2.vector[row] = 2 * row;
  }
  struct.flatten(false, null, 10);
  // The repeating field is expanded: every row now holds row-0's value (0).
  assertFalse(field1.isRepeating);
  for (int row = 0; row != 10; ++row) {
    assertEquals("col1 at " + row, 0, field1.vector[row]);
    assertEquals("col2 at " + row, 2 * row, field2.vector[row]);
  }
  struct.unFlatten();
  assertTrue(field1.isRepeating);
  // With the flag restored, field 1 stringifies as 0 for every row.
  for (int row = 0; row != 10; ++row) {
    StringBuilder sb = new StringBuilder();
    struct.stringifyValue(sb, row);
    assertEquals("[0, " + (2 * row) + "]", sb.toString());
  }
  struct.reset();
  assertFalse(field1.isRepeating);
}
Class: org.apache.hadoop.hive.ql.exec.vector.TestUnionColumnVector InternalCallVerifier EqualityVerifier
/**
 * Exercises UnionColumnVector.setElement: the tag of the source row selects
 * which child value is copied, and a null source row must produce a null
 * destination row regardless of tag. Also checks reset() restores flags.
 */
@Test public void testSet() throws Exception {
  LongColumnVector srcAlt1 = new LongColumnVector(10);
  LongColumnVector srcAlt2 = new LongColumnVector(10);
  UnionColumnVector src = new UnionColumnVector(10, srcAlt1, srcAlt2);
  src.init();
  LongColumnVector dstAlt1 = new LongColumnVector(10);
  LongColumnVector dstAlt2 = new LongColumnVector(10);
  UnionColumnVector dst = new UnionColumnVector(10, dstAlt1, dstAlt2);
  dst.init();
  srcAlt1.isRepeating = true;
  // Alternate tags between the two children; give each child distinct values.
  for (int row = 0; row < 10; row++) {
    src.tags[row] = row % 2;
    srcAlt1.vector[row] = row + 1;
    srcAlt2.vector[row] = row + 2;
  }
  // Row 4 carries tag 0; child 1 is repeating, so its value is row-0's 1.
  dst.setElement(3, 4, src);
  StringBuilder sb = new StringBuilder();
  dst.stringifyValue(sb, 3);
  assertEquals("{\"tag\": 0, \"value\": 1}", sb.toString());
  // A null source row copies null, overriding the tag.
  src.noNulls = false;
  src.isNull[5] = true;
  dst.setElement(3, 5, src);
  sb = new StringBuilder();
  dst.stringifyValue(sb, 3);
  assertEquals("null", sb.toString());
  // reset() clears the child repeating flag and the union's null state.
  src.reset();
  assertEquals(false, srcAlt1.isRepeating);
  assertEquals(true, src.noNulls);
}
IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies UnionColumnVector.flatten/unFlatten with a repeating child:
 * flatten must materialize the repeated value into every row of the child
 * and clear its repeating flag; unFlatten must restore the flag. Tags are
 * expected to survive both operations untouched.
 */
@Test public void testFlatten() throws Exception {
LongColumnVector col1=new LongColumnVector(10);
LongColumnVector col2=new LongColumnVector(10);
UnionColumnVector vector=new UnionColumnVector(10,col1,col2);
vector.init();
// Child 1 repeats its row-0 value; tags alternate between the two children.
col1.isRepeating=true;
for (int i=0; i < 10; ++i) {
vector.tags[i]=i % 2;
col1.vector[i]=i;
col2.vector[i]=2 * i;
}
vector.flatten(false,null,10);
// Flatten expands the repeating child: every row of col1 now holds 0.
assertFalse(col1.isRepeating);
for (int i=0; i < 10; ++i) {
assertEquals(i % 2,vector.tags[i]);
assertEquals("col1 at " + i,0,col1.vector[i]);
assertEquals("col2 at " + i,2 * i,col2.vector[i]);
}
vector.unFlatten();
assertTrue(col1.isRepeating);
// With the flag restored, even rows (tag 0) stringify child 1's repeated 0,
// odd rows (tag 1) stringify child 2's per-row value 2*i.
for (int i=0; i < 10; ++i) {
StringBuilder buf=new StringBuilder();
vector.stringifyValue(buf,i);
assertEquals("{\"tag\": " + (i % 2) + ", \"value\": "+ (i % 2 == 0 ? 0 : 2 * i)+ "}",buf.toString());
}
vector.reset();
assertFalse(col1.isRepeating);
}
Class: org.apache.hadoop.hive.ql.exec.vector.TestVectorFilterOperator InternalCallVerifier EqualityVerifier
/**
 * Runs a compound vectorized filter (col0 > col1 AND col2 == 0) over one
 * generated batch and checks that the batch's post-filter size matches a
 * row-by-row recount of the qualifying rows.
 */
@Test public void testBasicFilterOperator() throws HiveException {
  VectorFilterOperator vfo = getAVectorFilterOperator();
  vfo.initialize(hconf, null);
  VectorExpression ve1 = new FilterLongColGreaterLongColumn(0, 1);
  VectorExpression ve2 = new FilterLongColEqualDoubleScalar(2, 0);
  VectorExpression ve3 = new FilterExprAndExpr();
  ve3.setChildExpressions(new VectorExpression[]{ve1, ve2});
  vfo.setFilterCondition(ve3);
  FakeDataReader fdr = new FakeDataReader(1024 * 1, 3);
  VectorizedRowBatch vrg = fdr.getNext();
  vfo.getConditionEvaluator().evaluate(vrg);
  // The column-vector casts are loop-invariant: fetch them once instead of
  // re-casting on every one of the 1024 iterations (original re-fetched
  // vrg.cols[*] inside the loop).
  LongColumnVector l1 = (LongColumnVector) vrg.cols[0];
  LongColumnVector l2 = (LongColumnVector) vrg.cols[1];
  LongColumnVector l3 = (LongColumnVector) vrg.cols[2];
  int rows = 0;
  for (int i = 0; i < 1024; i++) {
    if ((l1.vector[i] > l2.vector[i]) && (l3.vector[i] == 0)) {
      rows++;
    }
  }
  // The evaluated batch's size must equal the manual count.
  Assert.assertEquals(rows, vrg.size);
}
Class: org.apache.hadoop.hive.ql.exec.vector.TestVectorizationContext InternalCallVerifier EqualityVerifier
/**
 * A boolean column compared for equality against a scalar in FILTER mode
 * must vectorize to FilterLongColEqualLongScalar (boolean values ride in
 * long column vectors).
 */
@Test public void testFilterBooleanColumnCompareBooleanScalar() throws HiveException {
ExprNodeGenericFuncDesc colEqualScalar=new ExprNodeGenericFuncDesc();
GenericUDFOPEqual gudf=new GenericUDFOPEqual();
colEqualScalar.setGenericUDF(gudf);
List children=new ArrayList(2);
// NOTE(review): the constant is declared boolean-typed but carries 20 —
// presumably only the declared type matters for operator selection; confirm.
ExprNodeConstantDesc constDesc=new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo,20);
ExprNodeColumnDesc colDesc=new ExprNodeColumnDesc(Boolean.class,"a","table",false);
children.add(colDesc);
children.add(constDesc);
colEqualScalar.setChildren(children);
List columns=new ArrayList();
columns.add("a");
VectorizationContext vc=new VectorizationContext("name",columns);
VectorExpression ve=vc.getVectorExpression(colEqualScalar,VectorExpressionDescriptor.Mode.FILTER);
assertEquals(FilterLongColEqualLongScalar.class,ve.getClass());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Walks a single ExprNodeGenericFuncDesc through a series of math UDFs and
 * checks that each is vectorized to the expected VectorExpression class.
 * The same children lists are mutated between phases, so statement order
 * is significant throughout.
 * Column mapping: "b" (double) is column 0, "a" (int/long) is column 1.
 */
@Test public void testMathFunctions() throws HiveException {
ExprNodeGenericFuncDesc mathFuncExpr=new ExprNodeGenericFuncDesc();
mathFuncExpr.setTypeInfo(TypeInfoFactory.doubleTypeInfo);
ExprNodeColumnDesc colDesc1=new ExprNodeColumnDesc(Integer.class,"a","table",false);
ExprNodeColumnDesc colDesc2=new ExprNodeColumnDesc(Double.class,"b","table",false);
List children1=new ArrayList();
List children2=new ArrayList();
children1.add(colDesc1);
children2.add(colDesc2);
List columns=new ArrayList();
columns.add("b");
columns.add("a");
VectorizationContext vc=new VectorizationContext("name",columns);
// sin(b) via a bridged legacy UDF -> double-to-double sin.
GenericUDFBridge gudfBridge=new GenericUDFBridge("sin",false,UDFSin.class.getName());
mathFuncExpr.setGenericUDF(gudfBridge);
mathFuncExpr.setChildren(children2);
VectorExpression ve=vc.getVectorExpression(mathFuncExpr,VectorExpressionDescriptor.Mode.PROJECTION);
Assert.assertEquals(FuncSinDoubleToDouble.class,ve.getClass());
// round(b) -> plain double round.
GenericUDFRound udfRound=new GenericUDFRound();
mathFuncExpr.setGenericUDF(udfRound);
mathFuncExpr.setChildren(children2);
ve=vc.getVectorExpression(mathFuncExpr);
Assert.assertEquals(FuncRoundDoubleToDouble.class,ve.getClass());
// bround(b) -> banker's rounding variant.
GenericUDFBRound udfBRound=new GenericUDFBRound();
mathFuncExpr.setGenericUDF(udfBRound);
ve=vc.getVectorExpression(mathFuncExpr);
Assert.assertEquals(FuncBRoundDoubleToDouble.class,ve.getClass());
// round(b, 4): adding a digits constant selects the with-num-digits form
// and the constant must be captured as the decimal-places argument.
mathFuncExpr.setGenericUDF(udfRound);
children2.add(new ExprNodeConstantDesc(4));
mathFuncExpr.setChildren(children2);
ve=vc.getVectorExpression(mathFuncExpr);
Assert.assertEquals(RoundWithNumDigitsDoubleToDouble.class,ve.getClass());
Assert.assertEquals(4,((RoundWithNumDigitsDoubleToDouble)ve).getDecimalPlaces().get());
// bround(b, 4): same, banker's variant.
mathFuncExpr.setGenericUDF(udfBRound);
ve=vc.getVectorExpression(mathFuncExpr);
Assert.assertEquals(BRoundWithNumDigitsDoubleToDouble.class,ve.getClass());
Assert.assertEquals(4,((BRoundWithNumDigitsDoubleToDouble)ve).getDecimalPlaces().get());
// log(4.0, b): two-arg log with a constant base, double column.
gudfBridge=new GenericUDFBridge("log",false,UDFLog.class.getName());
mathFuncExpr.setGenericUDF(gudfBridge);
children2.clear();
children2.add(new ExprNodeConstantDesc(4.0));
children2.add(colDesc2);
mathFuncExpr.setChildren(children2);
ve=vc.getVectorExpression(mathFuncExpr);
Assert.assertEquals(FuncLogWithBaseDoubleToDouble.class,ve.getClass());
Assert.assertTrue(4 == ((FuncLogWithBaseDoubleToDouble)ve).getBase());
// log(b): one-arg log degrades to natural log.
children2.clear();
children2.add(colDesc2);
mathFuncExpr.setChildren(children2);
ve=vc.getVectorExpression(mathFuncExpr);
Assert.assertEquals(FuncLnDoubleToDouble.class,ve.getClass());
// log(4.5, b): non-integer base, double column.
children2.clear();
children2.add(new ExprNodeConstantDesc(4.5));
children2.add(colDesc2);
mathFuncExpr.setChildren(children2);
ve=vc.getVectorExpression(mathFuncExpr);
Assert.assertEquals(FuncLogWithBaseDoubleToDouble.class,ve.getClass());
Assert.assertTrue(4.5 == ((FuncLogWithBaseDoubleToDouble)ve).getBase());
// log(4.5, a): long input column selects the long-to-double form.
children2.clear();
children2.add(new ExprNodeConstantDesc(4.5));
children2.add(colDesc1);
mathFuncExpr.setChildren(children2);
ve=vc.getVectorExpression(mathFuncExpr);
Assert.assertEquals(FuncLogWithBaseLongToDouble.class,ve.getClass());
Assert.assertTrue(4.5 == ((FuncLogWithBaseLongToDouble)ve).getBase());
// power(b, 4.5): constant exponent captured as the power argument.
children2.clear();
children2.add(colDesc2);
children2.add(new ExprNodeConstantDesc(4.5));
mathFuncExpr.setGenericUDF(new GenericUDFPower());
mathFuncExpr.setChildren(children2);
ve=vc.getVectorExpression(mathFuncExpr);
Assert.assertEquals(FuncPowerDoubleToDouble.class,ve.getClass());
Assert.assertTrue(4.5 == ((FuncPowerDoubleToDouble)ve).getPower());
// round(b) once more, back to the single-argument form.
mathFuncExpr.setGenericUDF(udfRound);
children2.clear();
children2.add(colDesc2);
mathFuncExpr.setChildren(children2);
ve=vc.getVectorExpression(mathFuncExpr);
Assert.assertEquals(FuncRoundDoubleToDouble.class,ve.getClass());
}
EqualityVerifier
/**
 * Checks the branch-free formula used by vectorized comparisons:
 * ((a - b) >>> 63) ^ 1 yields 1 exactly when a >= b (sign bit of the
 * non-overflowing difference, inverted).
 */
@Test public void testSIMDGreaterEqual(){
  long[][] cases = {{0, 0}, {1, 0}, {0, 1}};
  for (long[] pair : cases) {
    long a = pair[0];
    long b = pair[1];
    assertEquals(a >= b ? 1 : 0, ((a - b) >>> 63) ^ 1);
  }
}
EqualityVerifier
/**
 * Checks the branch-free formula used by vectorized comparisons:
 * ((b - a) >>> 63) ^ 1 yields 1 exactly when a <= b (sign bit of the
 * reversed difference, inverted).
 */
@Test public void testSIMDLessEqual(){
  long[][] cases = {{0, 0}, {1, 0}, {0, 1}};
  for (long[] pair : cases) {
    long a = pair[0];
    long b = pair[1];
    assertEquals(a <= b ? 1 : 0, ((b - a) >>> 63) ^ 1);
  }
}
InternalCallVerifier EqualityVerifier
/**
 * Vectorizes unary string expressions. lower(a) must become StringLower
 * reading input column 1 ("a") and writing scratch column 2; the nested
 * ltrim(lower(a)) must chain StringLower (out: 2) into StringLTrim
 * (in: 2, out: 3). Column mapping: "b" is 0, "a" is 1.
 */
@Test public void testUnaryStringExpressions() throws HiveException {
ExprNodeGenericFuncDesc stringUnary=new ExprNodeGenericFuncDesc();
stringUnary.setTypeInfo(TypeInfoFactory.stringTypeInfo);
ExprNodeColumnDesc colDesc=new ExprNodeColumnDesc(String.class,"a","table",false);
List children=new ArrayList();
children.add(colDesc);
stringUnary.setChildren(children);
List columns=new ArrayList();
columns.add("b");
columns.add("a");
VectorizationContext vc=new VectorizationContext("name",columns);
GenericUDF stringLower=new GenericUDFLower();
stringUnary.setGenericUDF(stringLower);
VectorExpression ve=vc.getVectorExpression(stringUnary);
assertEquals(StringLower.class,ve.getClass());
assertEquals(1,((StringLower)ve).getColNum());
assertEquals(2,((StringLower)ve).getOutputColumn());
// Fresh context so scratch-column allocation restarts from column 2.
vc=new VectorizationContext("name",columns);
ExprNodeGenericFuncDesc anotherUnary=new ExprNodeGenericFuncDesc();
anotherUnary.setTypeInfo(TypeInfoFactory.stringTypeInfo);
List children2=new ArrayList();
children2.add(stringUnary);
anotherUnary.setChildren(children2);
GenericUDFBridge udfbridge=new GenericUDFBridge("ltrim",false,GenericUDFLTrim.class.getName());
anotherUnary.setGenericUDF(udfbridge);
ve=vc.getVectorExpression(anotherUnary);
// The inner lower() becomes a child expression feeding the outer ltrim().
VectorExpression childVe=ve.getChildExpressions()[0];
assertEquals(StringLower.class,childVe.getClass());
assertEquals(1,((StringLower)childVe).getColNum());
assertEquals(2,((StringLower)childVe).getOutputColumn());
assertEquals(StringLTrim.class,ve.getClass());
assertEquals(2,((StringLTrim)ve).getInputColumn());
assertEquals(3,((StringLTrim)ve).getOutputColumn());
}
EqualityVerifier
/**
 * Checks the branch-free formula used by vectorized comparisons:
 * (((a - b) ^ (b - a)) >>> 63) ^ 1 yields 1 exactly when a == b (both
 * differences are zero only on equality, so the sign bit is set iff a != b).
 */
@Test public void testSIMDEqual(){
  long[][] cases = {{0, 0}, {1, 0}, {0, 1}};
  for (long[] pair : cases) {
    long a = pair[0];
    long b = pair[1];
    assertEquals(a == b ? 1 : 0, (((a - b) ^ (b - a)) >>> 63) ^ 1);
  }
}
EqualityVerifier
/**
 * Checks the branch-free formula used by vectorized comparisons:
 * ((a - b) ^ (b - a)) >>> 63 yields 1 exactly when a != b (the XOR of the
 * two differences has its sign bit set unless both are zero).
 */
@Test public void testSIMDNotEqual(){
  long[][] cases = {{0, 0}, {1, 0}, {0, 1}};
  for (long[] pair : cases) {
    long a = pair[0];
    long b = pair[1];
    assertEquals(a != b ? 1 : 0, ((a - b) ^ (b - a)) >>> 63);
  }
}
InternalCallVerifier EqualityVerifier
/**
 * Vectorizes timestamp UDFs: the bridged legacy "year" UDF must map to
 * VectorUDFYearLong, and the native to_unix_timestamp GenericUDF must map
 * to VectorUDFUnixTimeStampLong.
 */
@Test public void testTimeStampUdfs() throws HiveException {
  ExprNodeColumnDesc tsColumn = new ExprNodeColumnDesc(TypeInfoFactory.timestampTypeInfo,"a","table",false);
  List args = new ArrayList();
  args.add(tsColumn);
  List colNames = new ArrayList();
  colNames.add("b");
  colNames.add("a");
  VectorizationContext ctx = new VectorizationContext("name", colNames);
  // year(a) via the legacy-UDF bridge, int-typed result.
  ExprNodeGenericFuncDesc call = new ExprNodeGenericFuncDesc();
  call.setTypeInfo(TypeInfoFactory.intTypeInfo);
  call.setGenericUDF(new GenericUDFBridge("year", false, UDFYear.class.getName()));
  call.setChildren(args);
  VectorExpression expr = ctx.getVectorExpression(call);
  Assert.assertEquals(VectorUDFYearLong.class, expr.getClass());
  // to_unix_timestamp(a): native GenericUDF, long-typed result.
  call.setGenericUDF(new GenericUDFToUnixTimeStamp());
  call.setTypeInfo(TypeInfoFactory.longTypeInfo);
  expr = ctx.getVectorExpression(call);
  Assert.assertEquals(VectorUDFUnixTimeStampLong.class, expr.getClass());
}
InternalCallVerifier EqualityVerifier
/**
 * Vectorizes (col1 > 10) IS NOT NULL. In FILTER mode the result must be
 * SelectColumnIsNotNull over the comparison's scratch column (column 2,
 * after inputs col1/col2 occupy 0/1); in PROJECTION mode it must be
 * IsNotNull. In both modes the inner comparison vectorizes to
 * LongColGreaterLongScalar as the sole child expression.
 */
@Test public void testNotNullExpressions() throws HiveException {
ExprNodeColumnDesc col1Expr=new ExprNodeColumnDesc(Integer.class,"col1","table",false);
ExprNodeConstantDesc constDesc=new ExprNodeConstantDesc(new Integer(10));
GenericUDFOPGreaterThan udf=new GenericUDFOPGreaterThan();
ExprNodeGenericFuncDesc greaterExprDesc=new ExprNodeGenericFuncDesc();
greaterExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
greaterExprDesc.setGenericUDF(udf);
List children1=new ArrayList(2);
children1.add(col1Expr);
children1.add(constDesc);
greaterExprDesc.setChildren(children1);
// Wrap the comparison in IS NOT NULL.
ExprNodeGenericFuncDesc isNotNullExpr=new ExprNodeGenericFuncDesc();
isNotNullExpr.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
GenericUDFOPNotNull notNullUdf=new GenericUDFOPNotNull();
isNotNullExpr.setGenericUDF(notNullUdf);
List childOfNot=new ArrayList();
childOfNot.add(greaterExprDesc);
isNotNullExpr.setChildren(childOfNot);
List columns=new ArrayList();
columns.add("col1");
columns.add("col2");
VectorizationContext vc=new VectorizationContext("name",columns);
// FILTER mode: selection-vector variant, reading scratch column 2.
VectorExpression ve=vc.getVectorExpression(isNotNullExpr,VectorExpressionDescriptor.Mode.FILTER);
assertEquals(ve.getClass(),SelectColumnIsNotNull.class);
assertEquals(2,((SelectColumnIsNotNull)ve).getColNum());
assertEquals(ve.getChildExpressions()[0].getClass(),LongColGreaterLongScalar.class);
// PROJECTION mode: value-producing variant.
ve=vc.getVectorExpression(isNotNullExpr,VectorExpressionDescriptor.Mode.PROJECTION);
assertEquals(ve.getClass(),IsNotNull.class);
assertEquals(2,((IsNotNull)ve).getColNum());
assertEquals(ve.getChildExpressions()[0].getClass(),LongColGreaterLongScalar.class);
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Vectorizes BETWEEN / NOT BETWEEN filters for each supported column type.
 * GenericUDFBetween's first child is the invert flag: false selects the
 * *Between filter, true the *NotBetween variant. The same children list is
 * mutated between phases, so statement order is significant.
 */
@Test public void testBetweenFilters() throws HiveException {
// String column between two string constants.
ExprNodeColumnDesc col1Expr=new ExprNodeColumnDesc(String.class,"col1","table",false);
ExprNodeConstantDesc constDesc=new ExprNodeConstantDesc("Alpha");
ExprNodeConstantDesc constDesc2=new ExprNodeConstantDesc("Bravo");
GenericUDFBetween udf=new GenericUDFBetween();
List children1=new ArrayList();
children1.add(new ExprNodeConstantDesc(new Boolean(false)));
children1.add(col1Expr);
children1.add(constDesc);
children1.add(constDesc2);
ExprNodeGenericFuncDesc exprDesc=new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,udf,children1);
List columns=new ArrayList();
columns.add("col0");
columns.add("col1");
columns.add("col2");
VectorizationContext vc=new VectorizationContext("name",columns);
VectorExpression ve=vc.getVectorExpression(exprDesc,VectorExpressionDescriptor.Mode.FILTER);
assertTrue(ve instanceof FilterStringColumnBetween);
// Flipping the invert flag selects the NOT BETWEEN variant.
children1.set(0,new ExprNodeConstantDesc(new Boolean(true)));
ve=vc.getVectorExpression(exprDesc,VectorExpressionDescriptor.Mode.FILTER);
assertTrue(ve instanceof FilterStringColumnNotBetween);
// char(10) column between two HiveChar constants.
CharTypeInfo charTypeInfo=new CharTypeInfo(10);
col1Expr=new ExprNodeColumnDesc(charTypeInfo,"col1","table",false);
constDesc=new ExprNodeConstantDesc(charTypeInfo,new HiveChar("Alpha",10));
constDesc2=new ExprNodeConstantDesc(charTypeInfo,new HiveChar("Bravo",10));
udf=new GenericUDFBetween();
children1=new ArrayList();
children1.add(new ExprNodeConstantDesc(new Boolean(false)));
children1.add(col1Expr);
children1.add(constDesc);
children1.add(constDesc2);
exprDesc=new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,udf,children1);
vc=new VectorizationContext("name",columns);
ve=vc.getVectorExpression(exprDesc,VectorExpressionDescriptor.Mode.FILTER);
assertTrue(ve instanceof FilterCharColumnBetween);
children1.set(0,new ExprNodeConstantDesc(new Boolean(true)));
ve=vc.getVectorExpression(exprDesc,VectorExpressionDescriptor.Mode.FILTER);
assertTrue(ve instanceof FilterCharColumnNotBetween);
// varchar(10) column between two HiveVarchar constants.
VarcharTypeInfo varcharTypeInfo=new VarcharTypeInfo(10);
col1Expr=new ExprNodeColumnDesc(varcharTypeInfo,"col1","table",false);
constDesc=new ExprNodeConstantDesc(varcharTypeInfo,new HiveVarchar("Alpha",10));
constDesc2=new ExprNodeConstantDesc(varcharTypeInfo,new HiveVarchar("Bravo",10));
udf=new GenericUDFBetween();
children1=new ArrayList();
children1.add(new ExprNodeConstantDesc(new Boolean(false)));
children1.add(col1Expr);
children1.add(constDesc);
children1.add(constDesc2);
exprDesc=new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,udf,children1);
vc=new VectorizationContext("name",columns);
ve=vc.getVectorExpression(exprDesc,VectorExpressionDescriptor.Mode.FILTER);
assertTrue(ve instanceof FilterVarCharColumnBetween);
children1.set(0,new ExprNodeConstantDesc(new Boolean(true)));
ve=vc.getVectorExpression(exprDesc,VectorExpressionDescriptor.Mode.FILTER);
assertTrue(ve instanceof FilterVarCharColumnNotBetween);
// Long column between two int constants (reusing the same exprDesc by
// mutating its children in place).
children1.set(0,new ExprNodeConstantDesc(new Boolean(false)));
children1.set(1,new ExprNodeColumnDesc(Long.class,"col1","table",false));
children1.set(2,new ExprNodeConstantDesc(10));
children1.set(3,new ExprNodeConstantDesc(20));
ve=vc.getVectorExpression(exprDesc,VectorExpressionDescriptor.Mode.FILTER);
assertTrue(ve instanceof FilterLongColumnBetween);
children1.set(0,new ExprNodeConstantDesc(new Boolean(true)));
ve=vc.getVectorExpression(exprDesc,VectorExpressionDescriptor.Mode.FILTER);
assertTrue(ve instanceof FilterLongColumnNotBetween);
// Double column between two double constants.
children1.set(0,new ExprNodeConstantDesc(new Boolean(false)));
children1.set(1,new ExprNodeColumnDesc(Double.class,"col1","table",false));
children1.set(2,new ExprNodeConstantDesc(10.0d));
children1.set(3,new ExprNodeConstantDesc(20.0d));
ve=vc.getVectorExpression(exprDesc,VectorExpressionDescriptor.Mode.FILTER);
assertTrue(ve instanceof FilterDoubleColumnBetween);
children1.set(0,new ExprNodeConstantDesc(new Boolean(true)));
ve=vc.getVectorExpression(exprDesc,VectorExpressionDescriptor.Mode.FILTER);
assertTrue(ve instanceof FilterDoubleColumnNotBetween);
// Timestamp column with string bounds: the bounds stay strings, so the
// string filter classes are selected here.
children1.set(0,new ExprNodeConstantDesc(new Boolean(false)));
children1.set(1,new ExprNodeColumnDesc(Timestamp.class,"col1","table",false));
children1.set(2,new ExprNodeConstantDesc("2013-11-05 00:00:00.000"));
children1.set(3,new ExprNodeConstantDesc("2013-11-06 00:00:00.000"));
ve=vc.getVectorExpression(exprDesc,VectorExpressionDescriptor.Mode.FILTER);
assertEquals(FilterStringColumnBetween.class,ve.getClass());
children1.set(0,new ExprNodeConstantDesc(new Boolean(true)));
ve=vc.getVectorExpression(exprDesc,VectorExpressionDescriptor.Mode.FILTER);
assertEquals(FilterStringColumnNotBetween.class,ve.getClass());
}
InternalCallVerifier EqualityVerifier
/**
 * Vectorizing "20 > a" (scalar on the left-hand side) in FILTER mode must
 * select FilterLongScalarGreaterLongColumn.
 */
@Test public void testFilterScalarCompareColumn() throws HiveException {
  List colNames = new ArrayList();
  colNames.add("a");
  VectorizationContext ctx = new VectorizationContext("name", colNames);
  // Operands in scalar-first order: constant 20, then column "a".
  ExprNodeConstantDesc twenty = new ExprNodeConstantDesc(TypeInfoFactory.longTypeInfo, 20);
  ExprNodeColumnDesc colA = new ExprNodeColumnDesc(Long.class, "a", "table", false);
  List operands = new ArrayList(2);
  operands.add(twenty);
  operands.add(colA);
  ExprNodeGenericFuncDesc scalarGreaterCol = new ExprNodeGenericFuncDesc();
  scalarGreaterCol.setGenericUDF(new GenericUDFOPGreaterThan());
  scalarGreaterCol.setChildren(operands);
  VectorExpression expr = ctx.getVectorExpression(scalarGreaterCol, VectorExpressionDescriptor.Mode.FILTER);
  assertEquals(FilterLongScalarGreaterLongColumn.class, expr.getClass());
}
InternalCallVerifier EqualityVerifier
/**
 * A boolean column equality against a scalar in PROJECTION mode must
 * vectorize to LongColEqualLongScalar (booleans ride in long vectors).
 */
@Test public void testBooleanColumnCompareBooleanScalar() throws HiveException {
  List colNames = new ArrayList();
  colNames.add("a");
  VectorizationContext ctx = new VectorizationContext("name", colNames);
  ExprNodeColumnDesc boolCol = new ExprNodeColumnDesc(Boolean.class, "a", "table", false);
  // Constant is declared boolean-typed; the operator choice is type-driven.
  ExprNodeConstantDesc boolConst = new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, 20);
  List operands = new ArrayList(2);
  operands.add(boolCol);
  operands.add(boolConst);
  ExprNodeGenericFuncDesc equality = new ExprNodeGenericFuncDesc();
  equality.setGenericUDF(new GenericUDFOPEqual());
  equality.setChildren(operands);
  VectorExpression expr = ctx.getVectorExpression(equality, VectorExpressionDescriptor.Mode.PROJECTION);
  assertEquals(LongColEqualLongScalar.class, expr.getClass());
}
EqualityVerifier
/**
 * Checks the branch-free formula used by vectorized comparisons:
 * (a - b) >>> 63 yields 1 exactly when a < b (the sign bit of the
 * non-overflowing difference).
 */
@Test public void testSIMDLessThan(){
  long[][] cases = {{0, 0}, {1, 0}, {0, 1}};
  for (long[] pair : cases) {
    long a = pair[0];
    long b = pair[1];
    assertEquals(a < b ? 1 : 0, (a - b) >>> 63);
  }
}
InternalCallVerifier EqualityVerifier
/**
 * Projecting "20 - a" (scalar minus column) must vectorize to
 * LongScalarSubtractLongColumn.
 */
@Test public void testVectorizeScalarColumnExpression() throws HiveException {
  // Operands in scalar-first order: constant 20, then column "a".
  ExprNodeConstantDesc twenty = new ExprNodeConstantDesc(TypeInfoFactory.longTypeInfo, 20);
  ExprNodeColumnDesc colA = new ExprNodeColumnDesc(Long.class, "a", "table", false);
  List operands = new ArrayList(2);
  operands.add(twenty);
  operands.add(colA);
  ExprNodeGenericFuncDesc scalarMinusCol =
      new ExprNodeGenericFuncDesc(TypeInfoFactory.longTypeInfo, new GenericUDFOPMinus(), operands);
  List colNames = new ArrayList();
  colNames.add("a");
  VectorizationContext ctx = new VectorizationContext("name", colNames);
  VectorExpression expr = ctx.getVectorExpression(scalarMinusCol, VectorExpressionDescriptor.Mode.PROJECTION);
  assertEquals(expr.getClass(), LongScalarSubtractLongColumn.class);
}
InternalCallVerifier EqualityVerifier
/**
 * Vectorizes (col1 > 10) IS NULL. In FILTER mode the result must be
 * SelectColumnIsNull over the comparison's scratch column (2); in
 * PROJECTION mode it must be IsNull writing scratch column 3. In both
 * modes the inner comparison is the sole child, vectorized to
 * LongColGreaterLongScalar with output column 2.
 */
@Test public void testNullExpressions() throws HiveException {
ExprNodeColumnDesc col1Expr=new ExprNodeColumnDesc(Integer.class,"col1","table",false);
ExprNodeConstantDesc constDesc=new ExprNodeConstantDesc(new Integer(10));
GenericUDFOPGreaterThan udf=new GenericUDFOPGreaterThan();
ExprNodeGenericFuncDesc greaterExprDesc=new ExprNodeGenericFuncDesc();
greaterExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
greaterExprDesc.setGenericUDF(udf);
List children1=new ArrayList(2);
children1.add(col1Expr);
children1.add(constDesc);
greaterExprDesc.setChildren(children1);
// Wrap the comparison in IS NULL.
ExprNodeGenericFuncDesc isNullExpr=new ExprNodeGenericFuncDesc();
isNullExpr.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
GenericUDFOPNull isNullUdf=new GenericUDFOPNull();
isNullExpr.setGenericUDF(isNullUdf);
List childOfIsNull=new ArrayList();
childOfIsNull.add(greaterExprDesc);
isNullExpr.setChildren(childOfIsNull);
List columns=new ArrayList();
columns.add("col1");
columns.add("col2");
VectorizationContext vc=new VectorizationContext("name",columns);
// FILTER mode: selection-vector variant over scratch column 2.
VectorExpression ve=vc.getVectorExpression(isNullExpr,VectorExpressionDescriptor.Mode.FILTER);
assertEquals(ve.getClass(),SelectColumnIsNull.class);
assertEquals(ve.getChildExpressions()[0].getClass(),LongColGreaterLongScalar.class);
assertEquals(2,ve.getChildExpressions()[0].getOutputColumn());
assertEquals(2,((SelectColumnIsNull)ve).getColNum());
// PROJECTION mode: value-producing variant into scratch column 3.
ve=vc.getVectorExpression(isNullExpr,VectorExpressionDescriptor.Mode.PROJECTION);
assertEquals(ve.getClass(),IsNull.class);
assertEquals(2,((IsNull)ve).getColNum());
assertEquals(3,ve.getOutputColumn());
assertEquals(ve.getChildExpressions()[0].getClass(),LongColGreaterLongScalar.class);
}
InternalCallVerifier EqualityVerifier
/**
 * Vectorizes NOT (col1 > 10). In FILTER mode the NOT must become
 * SelectColumnIsFalse; in PROJECTION mode it must become NotCol. In both
 * modes the inner comparison vectorizes to LongColGreaterLongScalar as the
 * sole child expression.
 */
@Test public void testNotExpression() throws HiveException {
ExprNodeColumnDesc col1Expr=new ExprNodeColumnDesc(Integer.class,"col1","table",false);
ExprNodeConstantDesc constDesc=new ExprNodeConstantDesc(new Integer(10));
GenericUDFOPGreaterThan udf=new GenericUDFOPGreaterThan();
ExprNodeGenericFuncDesc greaterExprDesc=new ExprNodeGenericFuncDesc();
greaterExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
greaterExprDesc.setGenericUDF(udf);
List children1=new ArrayList(2);
children1.add(col1Expr);
children1.add(constDesc);
greaterExprDesc.setChildren(children1);
// Wrap the comparison in NOT.
ExprNodeGenericFuncDesc notExpr=new ExprNodeGenericFuncDesc();
notExpr.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
GenericUDFOPNot notUdf=new GenericUDFOPNot();
notExpr.setGenericUDF(notUdf);
List childOfNot=new ArrayList();
childOfNot.add(greaterExprDesc);
notExpr.setChildren(childOfNot);
List columns=new ArrayList();
columns.add("col0");
columns.add("col1");
columns.add("col2");
VectorizationContext vc=new VectorizationContext("name",columns);
// FILTER mode: NOT becomes an is-false selection.
VectorExpression ve=vc.getVectorExpression(notExpr,VectorExpressionDescriptor.Mode.FILTER);
assertEquals(ve.getClass(),SelectColumnIsFalse.class);
assertEquals(ve.getChildExpressions()[0].getClass(),LongColGreaterLongScalar.class);
// PROJECTION mode: NOT becomes a value-producing negation.
ve=vc.getVectorExpression(notExpr,VectorExpressionDescriptor.Mode.PROJECTION);
assertEquals(ve.getClass(),NotCol.class);
assertEquals(ve.getChildExpressions()[0].getClass(),LongColGreaterLongScalar.class);
}
InternalCallVerifier EqualityVerifier
/**
 * Vectorizes two-way AND/OR filter expressions over (col1 > 10) and
 * (col2 < 1.0). The AND must become FilterExprAndExpr and the OR
 * FilterExprOrExpr, each carrying the two vectorized comparisons
 * (long >, double <) as children in order.
 */
@Test public void testVectorizeFilterAndOrExpression() throws HiveException {
// col1 > 10 (long comparison).
ExprNodeColumnDesc col1Expr=new ExprNodeColumnDesc(Integer.class,"col1","table",false);
ExprNodeConstantDesc constDesc=new ExprNodeConstantDesc(new Integer(10));
GenericUDFOPGreaterThan udf=new GenericUDFOPGreaterThan();
ExprNodeGenericFuncDesc greaterExprDesc=new ExprNodeGenericFuncDesc();
greaterExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
greaterExprDesc.setGenericUDF(udf);
List children1=new ArrayList(2);
children1.add(col1Expr);
children1.add(constDesc);
greaterExprDesc.setChildren(children1);
// col2 < 1.0 (double comparison).
ExprNodeColumnDesc col2Expr=new ExprNodeColumnDesc(Float.class,"col2","table",false);
ExprNodeConstantDesc const2Desc=new ExprNodeConstantDesc(new Float(1.0));
GenericUDFOPLessThan udf2=new GenericUDFOPLessThan();
ExprNodeGenericFuncDesc lessExprDesc=new ExprNodeGenericFuncDesc();
lessExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
lessExprDesc.setGenericUDF(udf2);
List children2=new ArrayList(2);
children2.add(col2Expr);
children2.add(const2Desc);
lessExprDesc.setChildren(children2);
// AND over both comparisons.
GenericUDFOPAnd andUdf=new GenericUDFOPAnd();
ExprNodeGenericFuncDesc andExprDesc=new ExprNodeGenericFuncDesc();
andExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
andExprDesc.setGenericUDF(andUdf);
List children3=new ArrayList(2);
children3.add(greaterExprDesc);
children3.add(lessExprDesc);
andExprDesc.setChildren(children3);
List columns=new ArrayList();
columns.add("col0");
columns.add("col1");
columns.add("col2");
VectorizationContext vc=new VectorizationContext("name",columns);
VectorExpression ve=vc.getVectorExpression(andExprDesc,VectorExpressionDescriptor.Mode.FILTER);
assertEquals(ve.getClass(),FilterExprAndExpr.class);
assertEquals(ve.getChildExpressions()[0].getClass(),FilterLongColGreaterLongScalar.class);
assertEquals(ve.getChildExpressions()[1].getClass(),FilterDoubleColLessDoubleScalar.class);
// OR over the same two comparisons.
GenericUDFOPOr orUdf=new GenericUDFOPOr();
ExprNodeGenericFuncDesc orExprDesc=new ExprNodeGenericFuncDesc();
orExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
orExprDesc.setGenericUDF(orUdf);
List children4=new ArrayList(2);
children4.add(greaterExprDesc);
children4.add(lessExprDesc);
orExprDesc.setChildren(children4);
VectorExpression veOr=vc.getVectorExpression(orExprDesc,VectorExpressionDescriptor.Mode.FILTER);
assertEquals(veOr.getClass(),FilterExprOrExpr.class);
assertEquals(veOr.getChildExpressions()[0].getClass(),FilterLongColGreaterLongScalar.class);
assertEquals(veOr.getChildExpressions()[1].getClass(),FilterDoubleColLessDoubleScalar.class);
}
InternalCallVerifier EqualityVerifier
/**
 * Vectorizes three-way AND/OR filter expressions over (col1 > 10),
 * (col2 < 1.0) and (col3 > 10). The AND must become FilterExprAndExpr and
 * the OR FilterExprOrExpr, each carrying all three vectorized comparisons
 * (long >, double <, long >) as children in order.
 */
@Test public void testVectorizeFilterMultiAndOrExpression() throws HiveException {
  // col1 > 10 (long comparison).
  ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Integer.class, "col1", "table", false);
  ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(Integer.valueOf(10));
  GenericUDFOPGreaterThan udf = new GenericUDFOPGreaterThan();
  ExprNodeGenericFuncDesc greaterExprDesc = new ExprNodeGenericFuncDesc();
  greaterExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
  greaterExprDesc.setGenericUDF(udf);
  List children1 = new ArrayList(2);
  children1.add(col1Expr);
  children1.add(constDesc);
  greaterExprDesc.setChildren(children1);
  // col2 < 1.0 (double comparison).
  ExprNodeColumnDesc col2Expr = new ExprNodeColumnDesc(Float.class, "col2", "table", false);
  ExprNodeConstantDesc const2Desc = new ExprNodeConstantDesc(Float.valueOf(1.0f));
  GenericUDFOPLessThan udf2 = new GenericUDFOPLessThan();
  ExprNodeGenericFuncDesc lessExprDesc = new ExprNodeGenericFuncDesc();
  lessExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
  lessExprDesc.setGenericUDF(udf2);
  List children2 = new ArrayList(2);
  children2.add(col2Expr);
  children2.add(const2Desc);
  lessExprDesc.setChildren(children2);
  // col3 > 10 (long comparison).
  ExprNodeColumnDesc col3Expr = new ExprNodeColumnDesc(Integer.class, "col3", "table", false);
  ExprNodeConstantDesc const3Desc = new ExprNodeConstantDesc(Integer.valueOf(10));
  GenericUDFOPGreaterThan udf3 = new GenericUDFOPGreaterThan();
  ExprNodeGenericFuncDesc greaterExprDesc3 = new ExprNodeGenericFuncDesc();
  greaterExprDesc3.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
  greaterExprDesc3.setGenericUDF(udf3);
  List children3 = new ArrayList(2);
  children3.add(col3Expr);
  children3.add(const3Desc);
  greaterExprDesc3.setChildren(children3);
  // AND over the three comparisons.
  GenericUDFOPAnd andUdf = new GenericUDFOPAnd();
  ExprNodeGenericFuncDesc andExprDesc = new ExprNodeGenericFuncDesc();
  andExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
  andExprDesc.setGenericUDF(andUdf);
  List children4 = new ArrayList(2);
  children4.add(greaterExprDesc);
  children4.add(lessExprDesc);
  children4.add(greaterExprDesc3);
  andExprDesc.setChildren(children4);
  List columns = new ArrayList();
  columns.add("col0");
  columns.add("col1");
  columns.add("col2");
  columns.add("col3");
  VectorizationContext vc = new VectorizationContext("name", columns);
  VectorExpression ve = vc.getVectorExpression(andExprDesc, VectorExpressionDescriptor.Mode.FILTER);
  assertEquals(ve.getClass(), FilterExprAndExpr.class);
  assertEquals(ve.getChildExpressions()[0].getClass(), FilterLongColGreaterLongScalar.class);
  assertEquals(ve.getChildExpressions()[1].getClass(), FilterDoubleColLessDoubleScalar.class);
  assertEquals(ve.getChildExpressions()[2].getClass(), FilterLongColGreaterLongScalar.class);
  // OR over the same three comparisons.
  GenericUDFOPOr orUdf = new GenericUDFOPOr();
  ExprNodeGenericFuncDesc orExprDesc = new ExprNodeGenericFuncDesc();
  orExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
  orExprDesc.setGenericUDF(orUdf);
  List children5 = new ArrayList(2);
  children5.add(greaterExprDesc);
  children5.add(lessExprDesc);
  children5.add(greaterExprDesc3);
  orExprDesc.setChildren(children5);
  VectorExpression veOr = vc.getVectorExpression(orExprDesc, VectorExpressionDescriptor.Mode.FILTER);
  assertEquals(veOr.getClass(), FilterExprOrExpr.class);
  assertEquals(veOr.getChildExpressions()[0].getClass(), FilterLongColGreaterLongScalar.class);
  assertEquals(veOr.getChildExpressions()[1].getClass(), FilterDoubleColLessDoubleScalar.class);
  // BUG FIX: the original re-asserted on the AND expression's third child
  // here ("ve"); this assertion must check the OR expression's third child.
  assertEquals(veOr.getChildExpressions()[2].getClass(), FilterLongColGreaterLongScalar.class);
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies projection-mode vectorization of the nested arithmetic tree
 *   ((col1 + col2) - col3) + (col4 * (col5 % col6))
 * checking both the generated vector-expression classes and the scratch
 * output columns (6, 7, 8) allocated after the six input columns.
 * Cleanup: removed a leftover debug println and replaced raw List types
 * with typed lists.
 */
@Test public void testArithmeticExpressionVectorization() throws HiveException {
  GenericUDFOPPlus udf1=new GenericUDFOPPlus();
  GenericUDFOPMinus udf2=new GenericUDFOPMinus();
  GenericUDFOPMultiply udf3=new GenericUDFOPMultiply();
  GenericUDFOPPlus udf4=new GenericUDFOPPlus();
  GenericUDFOPMod udf5=new GenericUDFOPMod();
  ExprNodeGenericFuncDesc sumExpr=new ExprNodeGenericFuncDesc();
  sumExpr.setTypeInfo(TypeInfoFactory.intTypeInfo);
  sumExpr.setGenericUDF(udf1);
  ExprNodeGenericFuncDesc minusExpr=new ExprNodeGenericFuncDesc();
  minusExpr.setTypeInfo(TypeInfoFactory.intTypeInfo);
  minusExpr.setGenericUDF(udf2);
  ExprNodeGenericFuncDesc multiplyExpr=new ExprNodeGenericFuncDesc();
  multiplyExpr.setTypeInfo(TypeInfoFactory.intTypeInfo);
  multiplyExpr.setGenericUDF(udf3);
  ExprNodeGenericFuncDesc sum2Expr=new ExprNodeGenericFuncDesc();
  sum2Expr.setTypeInfo(TypeInfoFactory.intTypeInfo);
  sum2Expr.setGenericUDF(udf4);
  ExprNodeGenericFuncDesc modExpr=new ExprNodeGenericFuncDesc();
  modExpr.setTypeInfo(TypeInfoFactory.intTypeInfo);
  modExpr.setGenericUDF(udf5);
  ExprNodeColumnDesc col1Expr=new ExprNodeColumnDesc(Long.class,"col1","table",false);
  ExprNodeColumnDesc col2Expr=new ExprNodeColumnDesc(Long.class,"col2","table",false);
  ExprNodeColumnDesc col3Expr=new ExprNodeColumnDesc(Long.class,"col3","table",false);
  ExprNodeColumnDesc col4Expr=new ExprNodeColumnDesc(Long.class,"col4","table",false);
  ExprNodeColumnDesc col5Expr=new ExprNodeColumnDesc(Long.class,"col5","table",false);
  ExprNodeColumnDesc col6Expr=new ExprNodeColumnDesc(Long.class,"col6","table",false);
  List<ExprNodeDesc> children1=new ArrayList<ExprNodeDesc>(2);
  List<ExprNodeDesc> children2=new ArrayList<ExprNodeDesc>(2);
  List<ExprNodeDesc> children3=new ArrayList<ExprNodeDesc>(2);
  List<ExprNodeDesc> children4=new ArrayList<ExprNodeDesc>(2);
  List<ExprNodeDesc> children5=new ArrayList<ExprNodeDesc>(2);
  // sum = minus + multiply
  children1.add(minusExpr);
  children1.add(multiplyExpr);
  sumExpr.setChildren(children1);
  // minus = sum2 - col3
  children2.add(sum2Expr);
  children2.add(col3Expr);
  minusExpr.setChildren(children2);
  // sum2 = col1 + col2
  children3.add(col1Expr);
  children3.add(col2Expr);
  sum2Expr.setChildren(children3);
  // multiply = col4 * mod
  children4.add(col4Expr);
  children4.add(modExpr);
  multiplyExpr.setChildren(children4);
  // mod = col5 % col6
  children5.add(col5Expr);
  children5.add(col6Expr);
  modExpr.setChildren(children5);
  VectorizationContext vc=new VectorizationContext("name");
  vc.addInitialColumn("col1");
  vc.addInitialColumn("col2");
  vc.addInitialColumn("col3");
  vc.addInitialColumn("col4");
  vc.addInitialColumn("col5");
  vc.addInitialColumn("col6");
  vc.finishedAddingInitialColumns();
  VectorExpression ve=vc.getVectorExpression(sumExpr,VectorExpressionDescriptor.Mode.PROJECTION);
  // The root is the outer addition, with the two subtrees as children.
  assertTrue(ve instanceof LongColAddLongColumn);
  assertEquals(2,ve.getChildExpressions().length);
  VectorExpression childExpr1=ve.getChildExpressions()[0];
  VectorExpression childExpr2=ve.getChildExpressions()[1];
  // Scratch column 6 is the first one after the six input columns; the
  // assertions below pin the scratch-column assignment (6, 7, 8).
  assertEquals(6,ve.getOutputColumn());
  assertTrue(childExpr1 instanceof LongColSubtractLongColumn);
  assertEquals(1,childExpr1.getChildExpressions().length);
  assertTrue(childExpr1.getChildExpressions()[0] instanceof LongColAddLongColumn);
  assertEquals(7,childExpr1.getOutputColumn());
  assertEquals(6,childExpr1.getChildExpressions()[0].getOutputColumn());
  assertTrue(childExpr2 instanceof LongColMultiplyLongColumn);
  assertEquals(1,childExpr2.getChildExpressions().length);
  assertTrue(childExpr2.getChildExpressions()[0] instanceof LongColModuloLongColumn);
  assertEquals(8,childExpr2.getOutputColumn());
  assertEquals(6,childExpr2.getChildExpressions()[0].getOutputColumn());
}
EqualityVerifier
/**
 * Checks the branch-free "greater than" trick: for longs a and b (with b - a
 * not overflowing), (b - a) >>> 63 is 1 exactly when a > b.
 */
@Test public void testSIMDGreaterThan(){
  long[][] pairs={{0,0},{1,0},{0,1}};
  for (long[] pair : pairs) {
    long a=pair[0];
    long b=pair[1];
    assertEquals(a > b ? 1 : 0,(b - a) >>> 63);
  }
}
InternalCallVerifier EqualityVerifier
/**
 * Verifies AND/OR of (col1 > 10) with boolean col2 in both FILTER and
 * PROJECTION modes, including the child expressions and the input/output
 * column numbers chosen for the projection operators.
 * Bug fix: the last projection checks asserted on {@code veAnd} instead of
 * {@code veOr}, so the projected OR's children were never verified.
 */
@Test public void testVectorizeAndOrProjectionExpression() throws HiveException {
  ExprNodeColumnDesc col1Expr=new ExprNodeColumnDesc(Integer.class,"col1","table",false);
  ExprNodeConstantDesc constDesc=new ExprNodeConstantDesc(Integer.valueOf(10));
  GenericUDFOPGreaterThan udf=new GenericUDFOPGreaterThan();
  ExprNodeGenericFuncDesc greaterExprDesc=new ExprNodeGenericFuncDesc();
  greaterExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
  greaterExprDesc.setGenericUDF(udf);
  List<ExprNodeDesc> children1=new ArrayList<ExprNodeDesc>(2);
  children1.add(col1Expr);
  children1.add(constDesc);
  greaterExprDesc.setChildren(children1);
  ExprNodeColumnDesc col2Expr=new ExprNodeColumnDesc(Boolean.class,"col2","table",false);
  GenericUDFOPAnd andUdf=new GenericUDFOPAnd();
  ExprNodeGenericFuncDesc andExprDesc=new ExprNodeGenericFuncDesc();
  andExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
  andExprDesc.setGenericUDF(andUdf);
  List<ExprNodeDesc> children3=new ArrayList<ExprNodeDesc>(2);
  children3.add(greaterExprDesc);
  children3.add(col2Expr);
  andExprDesc.setChildren(children3);
  List<String> columns=new ArrayList<String>();
  columns.add("col1");
  columns.add("col2");
  VectorizationContext vc=new VectorizationContext("name",columns);
  // FILTER mode: AND of a filtered comparison and an is-true column check.
  VectorExpression veAnd=vc.getVectorExpression(andExprDesc,VectorExpressionDescriptor.Mode.FILTER);
  assertEquals(veAnd.getClass(),FilterExprAndExpr.class);
  assertEquals(veAnd.getChildExpressions()[0].getClass(),FilterLongColGreaterLongScalar.class);
  assertEquals(veAnd.getChildExpressions()[1].getClass(),SelectColumnIsTrue.class);
  // PROJECTION mode: comparison result lands in scratch column 2, col2 is
  // input column 1, and the AND output goes to scratch column 3.
  veAnd=vc.getVectorExpression(andExprDesc,VectorExpressionDescriptor.Mode.PROJECTION);
  assertEquals(veAnd.getClass(),ColAndCol.class);
  assertEquals(1,veAnd.getChildExpressions().length);
  assertEquals(veAnd.getChildExpressions()[0].getClass(),LongColGreaterLongScalar.class);
  assertEquals(2,((ColAndCol)veAnd).getColNum1());
  assertEquals(1,((ColAndCol)veAnd).getColNum2());
  assertEquals(3,((ColAndCol)veAnd).getOutputColumn());
  // Same structure for OR, on a fresh vectorization context.
  GenericUDFOPOr orUdf=new GenericUDFOPOr();
  ExprNodeGenericFuncDesc orExprDesc=new ExprNodeGenericFuncDesc();
  orExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
  orExprDesc.setGenericUDF(orUdf);
  List<ExprNodeDesc> children4=new ArrayList<ExprNodeDesc>(2);
  children4.add(greaterExprDesc);
  children4.add(col2Expr);
  orExprDesc.setChildren(children4);
  vc=new VectorizationContext("name",columns);
  VectorExpression veOr=vc.getVectorExpression(orExprDesc,VectorExpressionDescriptor.Mode.FILTER);
  assertEquals(veOr.getClass(),FilterExprOrExpr.class);
  assertEquals(veOr.getChildExpressions()[0].getClass(),FilterLongColGreaterLongScalar.class);
  assertEquals(veOr.getChildExpressions()[1].getClass(),SelectColumnIsTrue.class);
  veOr=vc.getVectorExpression(orExprDesc,VectorExpressionDescriptor.Mode.PROJECTION);
  assertEquals(veOr.getClass(),ColOrCol.class);
  // Fixed: these four checks previously asserted on veAnd (copy-paste bug).
  assertEquals(1,veOr.getChildExpressions().length);
  assertEquals(veOr.getChildExpressions()[0].getClass(),LongColGreaterLongScalar.class);
  assertEquals(2,((ColOrCol)veOr).getColNum1());
  assertEquals(1,((ColOrCol)veOr).getColNum2());
  assertEquals(3,((ColOrCol)veOr).getOutputColumn());
}
Class: org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch EqualityVerifier
/**
 * Sanity-checks makeBatch(): three columns (long, double, bytes), each with a
 * backing vector sized to the default batch size.
 */
@Test public void testVectorizedRowBatchCreate(){
  VectorizedRowBatch batch=makeBatch();
  Assert.assertEquals(3,batch.numCols);
  Assert.assertEquals(VectorizedRowBatch.DEFAULT_SIZE,batch.size);
  // assertEquals takes (expected, actual): keep the expected constant first;
  // the original had these three checks reversed.
  Assert.assertEquals(VectorizedRowBatch.DEFAULT_SIZE,((LongColumnVector)batch.cols[0]).vector.length);
  Assert.assertEquals(VectorizedRowBatch.DEFAULT_SIZE,((DoubleColumnVector)batch.cols[1]).vector.length);
  Assert.assertEquals(VectorizedRowBatch.DEFAULT_SIZE,((BytesColumnVector)batch.cols[2]).vector.length);
}
Class: org.apache.hadoop.hive.ql.exec.vector.expressions.TestConstantVectorExpression APIUtilityVerifier BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
/**
 * Exercises ConstantVectorExpression for long, double, bytes, decimal and
 * null-valued constants: each evaluate() must set its output column to a
 * repeating value (or a repeating null), and re-evaluating must restore the
 * constant even after the output slot has been clobbered.
 */
@Test public void testConstantExpression(){
// One constant expression per output column (0..4); column 4 is a null constant.
ConstantVectorExpression longCve=new ConstantVectorExpression(0,17);
ConstantVectorExpression doubleCve=new ConstantVectorExpression(1,17.34);
String str="alpha";
ConstantVectorExpression bytesCve=new ConstantVectorExpression(2,str.getBytes());
HiveDecimal decVal=HiveDecimal.create("25.8");
ConstantVectorExpression decimalCve=new ConstantVectorExpression(3,decVal);
ConstantVectorExpression nullCve=new ConstantVectorExpression(4,"string",true);
int size=20;
VectorizedRowBatch vrg=VectorizedRowGroupGenUtil.getVectorizedRowBatch(size,5,0);
LongColumnVector lcv=(LongColumnVector)vrg.cols[0];
DoubleColumnVector dcv=new DoubleColumnVector(size);
BytesColumnVector bcv=new BytesColumnVector(size);
// NOTE(review): this 2-arg constructor appears to be (precision, scale) —
// unlike the 3-arg (size, precision, scale) form used elsewhere in this
// file — TODO confirm against the DecimalColumnVector API.
DecimalColumnVector dv=new DecimalColumnVector(5,1);
BytesColumnVector bcvn=new BytesColumnVector(size);
// Replace the generated columns 1..4 with freshly allocated vectors.
vrg.cols[1]=dcv;
vrg.cols[2]=bcv;
vrg.cols[3]=dv;
vrg.cols[4]=bcvn;
longCve.evaluate(vrg);
doubleCve.evaluate(vrg);
bytesCve.evaluate(vrg);
decimalCve.evaluate(vrg);
nullCve.evaluate(vrg);
// Every constant column must be marked repeating, with the value in slot 0.
assertTrue(lcv.isRepeating);
assertTrue(dcv.isRepeating);
assertTrue(bcv.isRepeating);
assertEquals(17,lcv.vector[0]);
assertTrue(17.34 == dcv.vector[0]);
// The null constant produces a repeating null and clears noNulls.
assertTrue(bcvn.isRepeating);
assertTrue(bcvn.isNull[0]);
assertTrue(!bcvn.noNulls);
byte[] alphaBytes="alpha".getBytes();
assertTrue(bcv.length[0] == alphaBytes.length);
assertTrue(sameFirstKBytes(alphaBytes,bcv.vector[0],alphaBytes.length));
// Clobber the bytes output slot, then re-evaluate: the constant must come back.
((BytesColumnVector)(vrg.cols[2])).vector[0]="beta".getBytes();
bytesCve.evaluate(vrg);
assertTrue(bcv.length[0] == alphaBytes.length);
assertTrue(sameFirstKBytes(alphaBytes,bcv.vector[0],alphaBytes.length));
assertTrue(25.8 == dv.vector[0].getHiveDecimal().doubleValue());
// Same overwrite-and-restore check for the decimal constant.
((DecimalColumnVector)(vrg.cols[3])).vector[0].set(HiveDecimal.create("39.7"));
decimalCve.evaluate(vrg);
assertTrue(25.8 == dv.vector[0].getHiveDecimal().doubleValue());
}
Class: org.apache.hadoop.hive.ql.exec.vector.expressions.TestDecimalUtil EqualityVerifier
/**
 * DecimalUtil.floor rounds toward negative infinity. Where a case carries an
 * expected scale, the test first confirms that HiveDecimal stripped the
 * trailing zeros from the literal before flooring it.
 */
@Test public void testFloor(){
  DecimalColumnVector output=new DecimalColumnVector(4,20,13);
  // {input literal, expected floor, expected scale of input or null}
  String[][] cases={
    {"19.56778","19",null},
    {"23.00000","23","0"},
    {"-25.34567","-26",null},
    {"-17.00000","-17","0"},
    {"-0.30000","-1","1"},
    {"0.30000","0","1"}
  };
  for (String[] c : cases) {
    HiveDecimal input=HiveDecimal.create(c[0]);
    if (c[2] != null) {
      Assert.assertEquals(Integer.parseInt(c[2]),input.scale());
    }
    HiveDecimal expected=HiveDecimal.create(c[1]);
    DecimalUtil.floor(0,input,output);
    Assert.assertEquals(0,expected.compareTo(output.vector[0].getHiveDecimal()));
  }
}
EqualityVerifier
/**
 * DecimalUtil.negate flips the sign; zero stays zero (and the zero literal
 * is first checked to have had its trailing zeros stripped, scale 0).
 */
@Test public void testNegate(){
  DecimalColumnVector output=new DecimalColumnVector(4,20,13);
  // {input literal, expected negation, expected scale of input or null}
  String[][] cases={
    {"19.56778","-19.56778",null},
    {"-25.34567","25.34567",null},
    {"0.00000","0","0"}
  };
  for (String[] c : cases) {
    HiveDecimal input=HiveDecimal.create(c[0]);
    if (c[2] != null) {
      Assert.assertEquals(Integer.parseInt(c[2]),input.scale());
    }
    HiveDecimal expected=HiveDecimal.create(c[1]);
    DecimalUtil.negate(0,input,output);
    Assert.assertEquals(0,expected.compareTo(output.vector[0].getHiveDecimal()));
  }
}
EqualityVerifier
/**
 * DecimalUtil.ceiling rounds toward positive infinity. Where a case carries
 * an expected scale, the test first confirms HiveDecimal stripped the
 * trailing zeros from the literal.
 */
@Test public void testCeiling(){
  DecimalColumnVector output=new DecimalColumnVector(4,20,13);
  // {input literal, expected ceiling, expected scale of input or null}
  String[][] cases={
    {"19.56778","20",null},
    {"23.00000","23","0"},
    {"-25.34567","-25",null},
    {"-17.00000","-17","0"},
    {"-0.30000","0","1"},
    {"0.30000","1","1"}
  };
  for (String[] c : cases) {
    HiveDecimal input=HiveDecimal.create(c[0]);
    if (c[2] != null) {
      Assert.assertEquals(Integer.parseInt(c[2]),input.scale());
    }
    HiveDecimal expected=HiveDecimal.create(c[1]);
    DecimalUtil.ceiling(0,input,output);
    Assert.assertEquals(0,expected.compareTo(output.vector[0].getHiveDecimal()));
  }
}
EqualityVerifier
/**
 * DecimalUtil.sign: 1 for positive, -1 for negative, 0 for zero inputs.
 */
@Test public void testSign(){
  LongColumnVector lcv=new LongColumnVector(4);
  HiveDecimal d1=HiveDecimal.create("19.56778");
  DecimalUtil.sign(0,d1,lcv);
  Assert.assertEquals(1,lcv.vector[0]);
  HiveDecimal d2=HiveDecimal.create("-25.34567");
  DecimalUtil.sign(0,d2,lcv);
  Assert.assertEquals(-1,lcv.vector[0]);
  HiveDecimal d3=HiveDecimal.create("0.00000");
  // HiveDecimal strips trailing zeros, so "0.00000" has scale 0.
  Assert.assertEquals(0,d3.scale());
  // HiveDecimal is immutable: setScale returns a new value. The original
  // call discarded the result (a no-op), so capture it to actually pass a
  // rescaled zero to sign().
  d3=d3.setScale(5);
  DecimalUtil.sign(0,d3,lcv);
  Assert.assertEquals(0,lcv.vector[0]);
}
EqualityVerifier
/**
 * DecimalUtil.round into a column of scale 3: half-up rounding to three
 * fractional digits. Cases with an expected scale first confirm HiveDecimal
 * stripped the literal's trailing zeros.
 */
@Test public void testRoundWithDigits(){
  DecimalColumnVector output=new DecimalColumnVector(4,20,3);
  // {input literal, expected rounded value, expected scale of input or null}
  String[][] cases={
    {"19.56778","19.568",null},
    {"23.56700","23.567","3"},
    {"-25.34567","-25.346",null},
    {"-17.23400","-17.234","3"},
    {"19.36748","19.367",null},
    {"-25.54537","-25.545",null}
  };
  for (String[] c : cases) {
    HiveDecimal input=HiveDecimal.create(c[0]);
    if (c[2] != null) {
      Assert.assertEquals(Integer.parseInt(c[2]),input.scale());
    }
    HiveDecimal expected=HiveDecimal.create(c[1]);
    DecimalUtil.round(0,input,output);
    Assert.assertEquals(0,expected.compareTo(output.vector[0].getHiveDecimal()));
  }
}
EqualityVerifier
/**
 * DecimalUtil.round into a column of scale 0: half-up rounding to a whole
 * number. Cases with an expected scale first confirm HiveDecimal stripped
 * the literal's trailing zeros.
 */
@Test public void testRound(){
  DecimalColumnVector output=new DecimalColumnVector(4,20,0);
  // {input literal, expected rounded value, expected scale of input or null}
  String[][] cases={
    {"19.56778","20",null},
    {"23.00000","23","0"},
    {"-25.34567","-25",null},
    {"-17.00000","-17","0"},
    {"19.36778","19",null},
    {"-25.54567","-26",null}
  };
  for (String[] c : cases) {
    HiveDecimal input=HiveDecimal.create(c[0]);
    if (c[2] != null) {
      Assert.assertEquals(Integer.parseInt(c[2]),input.scale());
    }
    HiveDecimal expected=HiveDecimal.create(c[1]);
    DecimalUtil.round(0,input,output);
    Assert.assertEquals(0,expected.compareTo(output.vector[0].getHiveDecimal()));
  }
}
EqualityVerifier
/**
 * DecimalUtil.abs: a positive input is unchanged, a negative input loses
 * its sign.
 */
@Test public void testAbs(){
  DecimalColumnVector output=new DecimalColumnVector(4,20,13);
  HiveDecimal positive=HiveDecimal.create("19.56778");
  DecimalUtil.abs(0,positive,output);
  Assert.assertEquals(0,positive.compareTo(output.vector[0].getHiveDecimal()));
  HiveDecimal negative=HiveDecimal.create("-25.34567");
  HiveDecimal negated=HiveDecimal.create("25.34567");
  DecimalUtil.abs(0,negative,output);
  Assert.assertEquals(0,negated.compareTo(output.vector[0].getHiveDecimal()));
}
Class: org.apache.hadoop.hive.ql.exec.vector.expressions.TestUnaryMinus IterativeVerifier EqualityVerifier PublicFieldVerifier
/**
 * LongColUnaryMinus over a generated batch: every output row must be the
 * exact negation of the corresponding input row (their sum is zero).
 */
@Test public void testUnaryMinus(){
  VectorizedRowBatch batch=VectorizedRowGroupGenUtil.getVectorizedRowBatch(1024,2,23);
  new LongColUnaryMinus(0,1).evaluate(batch);
  long[] input=((LongColumnVector)batch.cols[0]).vector;
  long[] negated=((LongColumnVector)batch.cols[1]).vector;
  for (int row=0; row < negated.length; row++) {
    assertEquals(0,input[row] + negated[row]);
  }
}
Class: org.apache.hadoop.hive.ql.exec.vector.expressions.TestVectorArithmeticExpressions BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
/**
 * Column + scalar where the input column is flagged isRepeating: the output
 * must also be repeating, and a repeating NULL input must yield a repeating
 * NULL output with noNulls cleared.
 */
@Test public void testLongColAddLongScalarWithRepeating(){
LongColumnVector in, out;
VectorizedRowBatch batch;
LongColAddLongScalar expr;
// Case 1: repeating, non-null input.
batch=getVectorizedRowBatchSingleLongVector(VectorizedRowBatch.DEFAULT_SIZE);
in=(LongColumnVector)batch.cols[0];
in.isRepeating=true;
out=(LongColumnVector)batch.cols[1];
out.isRepeating=false;
expr=new LongColAddLongScalar(0,23,1);
expr.evaluate(batch);
// Entry 0 is row 0 of the i*37 fill pattern plus the scalar 23.
Assert.assertTrue(out.isRepeating);
Assert.assertTrue(out.noNulls);
Assert.assertEquals(out.vector[0],0 * 37 + 23);
// Case 2: repeating NULL input; output flags are pre-set to the opposite
// state to prove evaluate() overwrites them.
batch=getVectorizedRowBatchSingleLongVector(VectorizedRowBatch.DEFAULT_SIZE);
in=(LongColumnVector)batch.cols[0];
in.isRepeating=true;
in.noNulls=false;
in.isNull[0]=true;
out=(LongColumnVector)batch.cols[1];
out.isRepeating=false;
out.isNull[0]=false;
out.noNulls=true;
expr=new LongColAddLongScalar(0,23,1);
expr.evaluate(batch);
Assert.assertTrue(out.isRepeating);
Assert.assertFalse(out.noNulls);
Assert.assertEquals(true,out.isNull[0]);
// NULL output entries must still hold well-defined data values.
verifyLongNullDataVectorEntries(out,batch.selected,batch.selectedInUse,batch.size);
}
IterativeVerifier BranchVerifier BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
/**
 * Column + scalar with randomly placed NULLs in the input: non-null rows are
 * computed normally, null rows stay null, and the output loses noNulls.
 */
@Test public void testLongColAddLongScalarWithNulls(){
VectorizedRowBatch batch=getVectorizedRowBatchSingleLongVector(VectorizedRowBatch.DEFAULT_SIZE);
LongColumnVector lcv=(LongColumnVector)batch.cols[0];
LongColumnVector lcvOut=(LongColumnVector)batch.cols[1];
TestVectorizedRowBatch.addRandomNulls(lcv);
LongColAddLongScalar expr=new LongColAddLongScalar(0,23,1);
expr.evaluate(batch);
for (int i=0; i < VectorizedRowBatch.DEFAULT_SIZE; i++) {
if (!lcv.isNull[i]) {
// Input rows follow the i*37 fill pattern, so the sum is i*37 + 23.
Assert.assertEquals(i * 37 + 23,lcvOut.vector[i]);
}
else {
// NULL inputs propagate to the output.
Assert.assertTrue(lcvOut.isNull[i]);
}
}
Assert.assertFalse(lcvOut.noNulls);
Assert.assertFalse(lcvOut.isRepeating);
// NULL output entries must still hold well-defined data values.
verifyLongNullDataVectorEntries(lcvOut,batch.selected,batch.selectedInUse,batch.size);
}
APIUtilityVerifier IterativeVerifier BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
/**
 * Column + column addition covering: clean inputs, NULLs in either input, a
 * repeating input, a repeating-NULL input, and the recovery of the output's
 * noNulls/isRepeating flags once the inputs are clean again.
 * Per the assertions below, the generator fills column c, row i with
 * (i + 1) * seed * (c + 1).
 */
@Test public void testLongColAddLongColumn(){
int seed=17;
VectorizedRowBatch vrg=VectorizedRowGroupGenUtil.getVectorizedRowBatch(VectorizedRowBatch.DEFAULT_SIZE,6,seed);
LongColumnVector lcv0=(LongColumnVector)vrg.cols[0];
LongColumnVector lcv1=(LongColumnVector)vrg.cols[1];
LongColumnVector lcv2=(LongColumnVector)vrg.cols[2];
LongColumnVector lcv3=(LongColumnVector)vrg.cols[3];
LongColumnVector lcv4=(LongColumnVector)vrg.cols[4];
LongColumnVector lcv5=(LongColumnVector)vrg.cols[5];
// Phase 1: both inputs clean -> col0 + col1 = (i+1)*seed*(1+2).
LongColAddLongColumn expr=new LongColAddLongColumn(0,1,2);
expr.evaluate(vrg);
for (int i=0; i < VectorizedRowBatch.DEFAULT_SIZE; i++) {
assertEquals((i + 1) * seed * 3,lcv2.vector[i]);
}
assertTrue(lcv2.noNulls);
// Phase 2: NULL in the second input; pre-set the output flags to the
// opposite state to prove evaluate() overwrites them.
lcv1.noNulls=false;
lcv1.isNull[1]=true;
lcv2.isRepeating=true;
lcv2.noNulls=true;
expr.evaluate(vrg);
assertTrue(lcv2.isNull[1]);
assertFalse(lcv2.noNulls);
assertFalse(lcv2.isRepeating);
verifyLongNullDataVectorEntries(lcv2,vrg.selected,vrg.selectedInUse,vrg.size);
// Phase 3: NULLs in the first input as well; all propagate.
lcv0.noNulls=false;
lcv0.isNull[1]=true;
lcv0.isNull[3]=true;
expr.evaluate(vrg);
assertTrue(lcv2.isNull[1]);
assertTrue(lcv2.isNull[3]);
assertFalse(lcv2.noNulls);
verifyLongNullDataVectorEntries(lcv2,vrg.selected,vrg.selectedInUse,vrg.size);
// Phase 4: repeating first input (col3 repeats its row-0 value seed*4).
lcv3.isRepeating=true;
LongColAddLongColumn expr2=new LongColAddLongColumn(3,4,5);
expr2.evaluate(vrg);
for (int i=0; i < VectorizedRowBatch.DEFAULT_SIZE; i++) {
assertEquals(seed * (4 + 5 * (i + 1)),lcv5.vector[i]);
}
// Phase 5: NULL in the non-repeating input.
lcv4.noNulls=false;
lcv4.isNull[0]=true;
expr2.evaluate(vrg);
assertTrue(lcv5.isNull[0]);
assertFalse(lcv5.noNulls);
verifyLongNullDataVectorEntries(lcv5,vrg.selected,vrg.selectedInUse,vrg.size);
// Phase 6: repeating NULL input makes the whole output a repeating NULL.
lcv3.isRepeating=true;
lcv3.noNulls=false;
lcv3.isNull[0]=true;
expr2.evaluate(vrg);
assertFalse(lcv5.noNulls);
assertTrue(lcv5.isRepeating);
assertTrue(lcv5.isNull[0]);
verifyLongNullDataVectorEntries(lcv5,vrg.selected,vrg.selectedInUse,vrg.size);
// Phase 7: once inputs are clean again, evaluate() must reset the output's
// noNulls/isRepeating flags even though they were left in a stale state.
vrg.selectedInUse=false;
lcv0.noNulls=true;
lcv1.noNulls=true;
lcv0.isRepeating=false;
lcv1.isRepeating=false;
lcv2.noNulls=false;
lcv2.isRepeating=true;
expr.evaluate(vrg);
assertTrue(lcv2.noNulls);
assertFalse(lcv2.isRepeating);
}
IterativeVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Column + scalar over a clean batch (no nulls, not repeating): every output
 * row is the i*37 input plus the scalar 23, and the output flags stay clean.
 */
@Test public void testLongColAddLongScalarNoNulls(){
  VectorizedRowBatch batch=getVectorizedRowBatchSingleLongVector(VectorizedRowBatch.DEFAULT_SIZE);
  LongColAddLongScalar addExpr=new LongColAddLongScalar(0,23,1);
  addExpr.evaluate(batch);
  LongColumnVector result=(LongColumnVector)batch.cols[1];
  for (int row=0; row < VectorizedRowBatch.DEFAULT_SIZE; row++) {
    Assert.assertEquals(row * 37 + 23,result.vector[row]);
  }
  Assert.assertTrue(result.noNulls);
  Assert.assertFalse(result.isRepeating);
}
Class: org.apache.hadoop.hive.ql.exec.vector.expressions.TestVectorConditionalExpressions EqualityVerifier PublicFieldVerifier
/**
 * IF(col0, 100.0, col2) with a double scalar THEN branch and a double column
 * ELSE branch: per-row results checked against the fixed batch contents.
 */
@Test public void testDoubleScalarColumnIfExpr(){
  VectorizedRowBatch batch=getBatch1Long3DoubleVectors();
  VectorExpression ifExpr=new IfExprDoubleScalarDoubleColumn(0,100.0d,2,3);
  ifExpr.evaluate(batch);
  double[] result=((DoubleColumnVector)batch.cols[3]).vector;
  double[] expected={1d,2d,100d,100d};
  for (int row=0; row < expected.length; row++) {
    assertEquals(true,expected[row] == result[row]);
  }
}
EqualityVerifier PublicFieldVerifier
/**
 * IF(col0, col1, 200.0) with a double column THEN branch and a double scalar
 * ELSE branch: per-row results checked against the fixed batch contents.
 */
@Test public void testDoubleColumnScalarIfExpr(){
  VectorizedRowBatch batch=getBatch1Long3DoubleVectors();
  VectorExpression ifExpr=new IfExprDoubleColumnDoubleScalar(0,1,200d,3);
  ifExpr.evaluate(batch);
  double[] result=((DoubleColumnVector)batch.cols[3]).vector;
  double[] expected={200d,200d,-3d,-4d};
  for (int row=0; row < expected.length; row++) {
    assertEquals(true,expected[row] == result[row]);
  }
}
EqualityVerifier PublicFieldVerifier
/**
 * IF(col0, 100, 200) with scalar branches on both sides: each row picks one
 * of the two constants according to the boolean selector column.
 */
@Test public void testLongScalarScalarIfExpr(){
  VectorizedRowBatch batch=getBatch4LongVectors();
  VectorExpression ifExpr=new IfExprLongScalarLongScalar(0,100,200,3);
  ifExpr.evaluate(batch);
  long[] result=((LongColumnVector)batch.cols[3]).vector;
  long[] expected={200,200,100,100};
  for (int row=0; row < expected.length; row++) {
    assertEquals(expected[row],result[row]);
  }
}
EqualityVerifier PublicFieldVerifier
/**
 * IF(col0, col1, col2) with column branches on both sides, covering nine
 * scenarios: clean inputs, each of the three columns repeating, NULLs in
 * each of the three columns, and a repeating NULL in each branch column.
 * Each scenario starts from a fresh batch via getBatch4LongVectors().
 */
@Test public void testLongColumnColumnIfExpr(){
// Scenario 1: clean inputs.
VectorizedRowBatch batch=getBatch4LongVectors();
VectorExpression expr=new IfExprLongColumnLongColumn(0,1,2,3);
expr.evaluate(batch);
LongColumnVector r=(LongColumnVector)batch.cols[3];
assertEquals(1,r.vector[0]);
assertEquals(2,r.vector[1]);
assertEquals(-3,r.vector[2]);
assertEquals(-4,r.vector[3]);
assertEquals(true,r.noNulls);
assertEquals(false,r.isRepeating);
// Scenario 2: repeating selector column.
batch=getBatch4LongVectors();
r=(LongColumnVector)batch.cols[3];
batch.cols[0].isRepeating=true;
expr.evaluate(batch);
assertEquals(1,r.vector[0]);
assertEquals(4,r.vector[3]);
// Scenario 3: repeating THEN column.
batch=getBatch4LongVectors();
r=(LongColumnVector)batch.cols[3];
batch.cols[1].isRepeating=true;
expr.evaluate(batch);
assertEquals(1,r.vector[0]);
assertEquals(2,r.vector[1]);
assertEquals(-1,r.vector[2]);
assertEquals(-1,r.vector[3]);
// Scenario 4: repeating ELSE column.
batch=getBatch4LongVectors();
r=(LongColumnVector)batch.cols[3];
batch.cols[2].isRepeating=true;
expr.evaluate(batch);
assertEquals(1,r.vector[0]);
assertEquals(1,r.vector[1]);
assertEquals(-3,r.vector[2]);
assertEquals(-4,r.vector[3]);
// Scenario 5: NULLs in the selector column (output stays non-null).
batch=getBatch4LongVectors();
r=(LongColumnVector)batch.cols[3];
batch.cols[0].noNulls=false;
batch.cols[0].isNull[1]=true;
batch.cols[0].isNull[2]=true;
expr.evaluate(batch);
assertEquals(1,r.vector[0]);
assertEquals(2,r.vector[1]);
assertEquals(3,r.vector[2]);
assertEquals(-4,r.vector[3]);
assertEquals(true,r.noNulls);
assertEquals(false,r.isRepeating);
// Scenario 6: NULLs in the THEN column propagate for selected rows.
batch=getBatch4LongVectors();
r=(LongColumnVector)batch.cols[3];
batch.cols[1].noNulls=false;
batch.cols[1].isNull[1]=true;
batch.cols[1].isNull[2]=true;
expr.evaluate(batch);
assertEquals(1,r.vector[0]);
assertEquals(2,r.vector[1]);
assertEquals(true,r.isNull[2]);
assertEquals(-4,r.vector[3]);
assertEquals(false,r.noNulls);
assertEquals(false,r.isRepeating);
// Scenario 7: NULLs in the ELSE column propagate for non-selected rows.
batch=getBatch4LongVectors();
r=(LongColumnVector)batch.cols[3];
batch.cols[2].noNulls=false;
batch.cols[2].isNull[1]=true;
batch.cols[2].isNull[2]=true;
expr.evaluate(batch);
assertEquals(1,r.vector[0]);
assertEquals(true,r.isNull[1]);
assertEquals(-3,r.vector[2]);
assertEquals(-4,r.vector[3]);
assertEquals(false,r.noNulls);
assertEquals(false,r.isRepeating);
// Scenario 8: repeating NULL in the THEN column.
batch=getBatch4LongVectors();
r=(LongColumnVector)batch.cols[3];
batch.cols[1].noNulls=false;
batch.cols[1].isNull[0]=true;
batch.cols[1].isRepeating=true;
expr.evaluate(batch);
assertEquals(1,r.vector[0]);
assertEquals(2,r.vector[1]);
assertEquals(true,r.isNull[2]);
assertEquals(true,r.isNull[3]);
assertEquals(false,r.noNulls);
assertEquals(false,r.isRepeating);
// Scenario 9: repeating NULL in the ELSE column.
batch=getBatch4LongVectors();
r=(LongColumnVector)batch.cols[3];
batch.cols[2].noNulls=false;
batch.cols[2].isNull[0]=true;
batch.cols[2].isRepeating=true;
expr.evaluate(batch);
assertEquals(true,r.isNull[0]);
assertEquals(true,r.isNull[1]);
assertEquals(-3,r.vector[2]);
assertEquals(-4,r.vector[3]);
assertEquals(false,r.noNulls);
assertEquals(false,r.isRepeating);
}
EqualityVerifier PublicFieldVerifier
/**
 * IF(col0, col1, col2) over double columns with clean inputs: per-row results
 * checked against the fixed batch contents, and the output flags stay clean.
 */
@Test public void testDoubleColumnColumnIfExpr(){
  VectorizedRowBatch batch=getBatch1Long3DoubleVectors();
  VectorExpression ifExpr=new IfExprDoubleColumnDoubleColumn(0,1,2,3);
  ifExpr.evaluate(batch);
  DoubleColumnVector result=(DoubleColumnVector)batch.cols[3];
  double[] expected={1d,2d,-3d,-4d};
  for (int row=0; row < expected.length; row++) {
    assertEquals(true,expected[row] == result.vector[row]);
  }
  assertEquals(true,result.noNulls);
  assertEquals(false,result.isRepeating);
}
EqualityVerifier PublicFieldVerifier
/**
 * IF(col0, col1, 100) with a long column THEN branch and a long scalar ELSE
 * branch: per-row results checked against the fixed batch contents.
 */
@Test public void testLongColumnScalarIfExpr(){
  VectorizedRowBatch batch=getBatch4LongVectors();
  VectorExpression ifExpr=new IfExprLongColumnLongScalar(0,1,100,3);
  ifExpr.evaluate(batch);
  long[] result=((LongColumnVector)batch.cols[3]).vector;
  long[] expected={100,100,-3,-4};
  for (int row=0; row < expected.length; row++) {
    assertEquals(expected[row],result[row]);
  }
}
EqualityVerifier PublicFieldVerifier
/**
 * IF(col0, 100.0, 200.0) with double scalar branches on both sides: each row
 * picks one of the two constants according to the boolean selector column.
 */
@Test public void testDoubleScalarScalarIfExpr(){
  VectorizedRowBatch batch=getBatch1Long3DoubleVectors();
  VectorExpression ifExpr=new IfExprDoubleScalarDoubleScalar(0,100.0d,200.0d,3);
  ifExpr.evaluate(batch);
  double[] result=((DoubleColumnVector)batch.cols[3]).vector;
  double[] expected={200d,200d,100d,100d};
  for (int row=0; row < expected.length; row++) {
    assertEquals(true,expected[row] == result[row]);
  }
}
EqualityVerifier PublicFieldVerifier
/**
 * IF(col0, 100, col2) with a long scalar THEN branch and a long column ELSE
 * branch: per-row results checked against the fixed batch contents.
 */
@Test public void testLongScalarColumnIfExpr(){
  VectorizedRowBatch batch=getBatch4LongVectors();
  VectorExpression ifExpr=new IfExprLongScalarLongColumn(0,100,2,3);
  ifExpr.evaluate(batch);
  long[] result=((LongColumnVector)batch.cols[3]).vector;
  long[] expected={1,2,100,100};
  for (int row=0; row < expected.length; row++) {
    assertEquals(expected[row],result[row]);
  }
}
Class: org.apache.hadoop.hive.ql.exec.vector.expressions.TestVectorExpressionWriters EqualityVerifier
/**
 * Round-trips nanosecond epoch offsets through java.sql.Timestamp via
 * TimestampUtils.assignTimeInNanoSec / getTimeNanoSec.
 * Timestamp.getNanos() is always non-negative: as the assertions below show,
 * a negative nano offset t yields getNanos() == 1e9 + (t % 1e9), with the
 * seconds rounded toward negative infinity, yet getTimeNanoSec must still
 * recover the original signed offset exactly.
 */
@Test public void testTimeStampUtils(){
Timestamp ts=new Timestamp(0);
// Small positive offset: nanos are simply the sub-second remainder.
TimestampUtils.assignTimeInNanoSec(1234567891,ts);
Assert.assertEquals(234567891,ts.getNanos());
Assert.assertEquals(1234567891,TimestampUtils.getTimeNanoSec(ts));
// Small negative offset: nanos wrap to 1e9 - remainder.
TimestampUtils.assignTimeInNanoSec(-1234567891,ts);
Assert.assertEquals((1000000000 - 234567891),ts.getNanos());
Assert.assertEquals(-1234567891,TimestampUtils.getTimeNanoSec(ts));
// Sub-second positive offset (zero whole seconds).
TimestampUtils.assignTimeInNanoSec(234567891,ts);
Assert.assertEquals(234567891,ts.getNanos());
Assert.assertEquals(234567891,TimestampUtils.getTimeNanoSec(ts));
// Sub-second negative offset.
TimestampUtils.assignTimeInNanoSec(-234567891,ts);
Assert.assertEquals((1000000000 - 234567891),ts.getNanos());
Assert.assertEquals(-234567891,TimestampUtils.getTimeNanoSec(ts));
// Large magnitudes, both signs, including values near Long's extremes.
long big=152414813551296L;
TimestampUtils.assignTimeInNanoSec(big,ts);
Assert.assertEquals(big % 1000000000,ts.getNanos());
Assert.assertEquals(big,TimestampUtils.getTimeNanoSec(ts));
big=-152414813551296L;
TimestampUtils.assignTimeInNanoSec(big,ts);
Assert.assertEquals((1000000000 + (big % 1000000000)),ts.getNanos());
Assert.assertEquals(big,TimestampUtils.getTimeNanoSec(ts));
big=-1794750230000828416L;
ts=new Timestamp(0);
TimestampUtils.assignTimeInNanoSec(big,ts);
Assert.assertEquals((1000000000 + big % 1000000000),ts.getNanos());
Assert.assertEquals(big,TimestampUtils.getTimeNanoSec(ts));
big=1700000000000000016L;
ts=new Timestamp(0);
TimestampUtils.assignTimeInNanoSec(big,ts);
Assert.assertEquals(big % 1000000000,ts.getNanos());
Assert.assertEquals(big,TimestampUtils.getTimeNanoSec(ts));
big=-1700000000000000016L;
ts=new Timestamp(0);
TimestampUtils.assignTimeInNanoSec(big,ts);
Assert.assertEquals((1000000000 + big % 1000000000),ts.getNanos());
Assert.assertEquals(big,TimestampUtils.getTimeNanoSec(ts));
}
Class: org.apache.hadoop.hive.ql.exec.vector.expressions.TestVectorFilterExpressions APIUtilityVerifier EqualityVerifier
/**
 * NOT BETWEEN filter over timestamps stored as nanosecond longs: of the
 * three populated rows, only rows 0 and 2 fall outside [startTS, endTS] and
 * must remain selected after the filter runs.
 */
@Test public void testFilterTimestampNotBetween(){
int seed=17;
VectorizedRowBatch vrb=VectorizedRowGroupGenUtil.getVectorizedRowBatch(5,2,seed);
LongColumnVector lcv0=(LongColumnVector)vrb.cols[0];
// Ten-nanosecond window on 2013-11-05 00:00:00.
long startTS=TimestampUtils.getTimeNanoSec(Timestamp.valueOf("2013-11-05 00:00:00.000000000"));
long endTS=TimestampUtils.getTimeNanoSec(Timestamp.valueOf("2013-11-05 00:00:00.000000010"));
// Row 0: a day before the window.
Timestamp ts0=Timestamp.valueOf("2013-11-04 00:00:00.000000000");
lcv0.vector[0]=TimestampUtils.getTimeNanoSec(ts0);
// Row 1: inside the window -> filtered out by NOT BETWEEN.
Timestamp ts1=Timestamp.valueOf("2013-11-05 00:00:00.000000002");
lcv0.vector[1]=TimestampUtils.getTimeNanoSec(ts1);
// Row 2: far in the future.
Timestamp ts2=Timestamp.valueOf("2099-11-06 00:00:00.000");
lcv0.vector[2]=TimestampUtils.getTimeNanoSec(ts2);
vrb.size=3;
VectorExpression expr1=new FilterLongColumnNotBetween(0,startTS,endTS);
expr1.evaluate(vrb);
// Rows 0 and 2 survive; the batch switches to selected-vector mode.
assertEquals(2,vrb.size);
assertEquals(true,vrb.selectedInUse);
assertEquals(0,vrb.selected[0]);
assertEquals(2,vrb.selected[1]);
}
APIUtilityVerifier EqualityVerifier
/**
 * col > col filter across flag combinations: clean inputs, a NULL in the
 * left column, a repeating left column, a repeating NULL left column, and a
 * NULL in the right column. Each phase resets size/selectedInUse and checks
 * how many of the three populated rows survive.
 */
@Test public void testFilterLongColGreaterLongColumn(){
int seed=17;
VectorizedRowBatch b=VectorizedRowGroupGenUtil.getVectorizedRowBatch(VectorizedRowBatch.DEFAULT_SIZE,2,seed);
LongColumnVector lcv0=(LongColumnVector)b.cols[0];
LongColumnVector lcv1=(LongColumnVector)b.cols[1];
b.size=3;
FilterLongColGreaterLongColumn expr=new FilterLongColGreaterLongColumn(0,1);
// Left column all 10s; right column 20, 1, 7 -> rows 1 and 2 pass.
lcv0.vector[0]=10;
lcv0.vector[1]=10;
lcv0.vector[2]=10;
lcv1.vector[0]=20;
lcv1.vector[1]=1;
lcv1.vector[2]=7;
expr.evaluate(b);
assertEquals(2,b.size);
assertEquals(1,b.selected[0]);
assertEquals(2,b.selected[1]);
// NULL in the left column at row 1: only row 2 survives the re-filter.
lcv0.noNulls=false;
lcv0.isNull[1]=true;
expr.evaluate(b);
assertEquals(1,b.size);
assertEquals(2,b.selected[0]);
// Repeating non-null left column: the prior NULL flags are ignored and the
// same two rows pass again.
b.size=3;
b.selectedInUse=false;
lcv0.isRepeating=true;
lcv0.noNulls=true;
expr.evaluate(b);
assertEquals(2,b.size);
// Repeating NULL left column: nothing passes.
b.size=3;
b.selectedInUse=false;
lcv0.isNull[0]=true;
lcv0.noNulls=false;
expr.evaluate(b);
assertEquals(0,b.size);
// Non-repeating left (with NULLs at rows 0 and 1 still set) and a NULL in
// the right column at row 2: nothing passes.
b.size=3;
b.selectedInUse=false;
lcv0.isRepeating=false;
lcv1.noNulls=false;
lcv1.isNull[2]=true;
expr.evaluate(b);
assertEquals(0,b.size);
}
BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Verifies FilterLongColGreaterLongScalar across the combinations of
 * noNulls/isRepeating on the input column, including how selectedInUse is
 * left unset when every row passes. Uses the shared getSimpleLongBatch()
 * fixture (4 rows; per the assertions, rows 2 and 3 hold values above 1).
 */
@Test public void testColOpScalarNumericFilterNullAndRepeatingLogic(){
FilterLongColGreaterLongScalar f=new FilterLongColGreaterLongScalar(0,1);
VectorizedRowBatch batch=this.getSimpleLongBatch();
// Case 1: no nulls, not repeating -- rows 2 and 3 exceed the scalar 1.
batch.cols[0].noNulls=true;
batch.cols[0].isRepeating=false;
f.evaluate(batch);
Assert.assertEquals(2,batch.size);
Assert.assertTrue(batch.selectedInUse);
Assert.assertEquals(2,batch.selected[0]);
Assert.assertEquals(3,batch.selected[1]);
// Case 2: scalar -1 passes every row, so the selection vector stays unused.
f=new FilterLongColGreaterLongScalar(0,-1);
batch=getSimpleLongBatch();
f.evaluate(batch);
Assert.assertFalse(batch.selectedInUse);
Assert.assertEquals(4,batch.size);
// Case 3: with row 3 nulled, only row 2 survives the scalar-1 filter.
batch=getSimpleLongBatch();
f=new FilterLongColGreaterLongScalar(0,1);
batch.cols[0].noNulls=false;
batch.cols[0].isRepeating=false;
batch.cols[0].isNull[3]=true;
f.evaluate(batch);
Assert.assertTrue(batch.selectedInUse);
Assert.assertEquals(1,batch.size);
Assert.assertEquals(2,batch.selected[0]);
// Case 4: repeating non-null value that passes keeps the whole batch.
batch=getSimpleLongBatch();
f=new FilterLongColGreaterLongScalar(0,-1);
batch.cols[0].noNulls=true;
batch.cols[0].isRepeating=true;
f.evaluate(batch);
Assert.assertFalse(batch.selectedInUse);
Assert.assertEquals(4,batch.size);
// Case 5: a repeating null value removes every row.
batch=getSimpleLongBatch();
batch.cols[0].noNulls=false;
batch.cols[0].isRepeating=true;
batch.cols[0].isNull[0]=true;
f.evaluate(batch);
Assert.assertEquals(0,batch.size);
}
APIUtilityVerifier EqualityVerifier
/**
 * Tests FilterLongColLessLongColumn driven by a child expression: the child
 * first computes col2 = col0 + 10, then the filter keeps rows where
 * col2 < col1. Only row 2 passes (9 + 10 = 19 < 20).
 */
@Test public void testFilterLongColLessLongColumn(){
int seed=17;
VectorizedRowBatch vrg=VectorizedRowGroupGenUtil.getVectorizedRowBatch(5,3,seed);
LongColumnVector lcv0=(LongColumnVector)vrg.cols[0];
LongColumnVector lcv1=(LongColumnVector)vrg.cols[1];
// The filter reads column 2, which is populated by the child expression.
FilterLongColLessLongColumn expr=new FilterLongColLessLongColumn(2,1);
LongColAddLongScalar childExpr=new LongColAddLongScalar(0,10,2);
expr.setChildExpressions(new VectorExpression[]{childExpr});
lcv0.vector[0]=10;
lcv0.vector[1]=20;
lcv0.vector[2]=9;
lcv0.vector[3]=20;
lcv0.vector[4]=10;
lcv1.vector[0]=20;
lcv1.vector[1]=10;
lcv1.vector[2]=20;
lcv1.vector[3]=10;
lcv1.vector[4]=20;
expr.evaluate(vrg);
assertEquals(1,vrg.size);
assertEquals(2,vrg.selected[0]);
}
BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Spot check col less-than scalar for decimal. (The remaining comparison
 * operators share the same template, so one operator suffices here.)
 */
@Test public void testFilterDecimalColLessScalar(){
HiveDecimal zero=HiveDecimal.create("0");
VectorizedRowBatch batch=getVectorizedRowBatch1DecimalCol();
VectorExpression filter=new FilterDecimalColLessDecimalScalar(0,zero);
filter.evaluate(batch);
// Exactly one row (row 1) compares below zero.
assertEquals(1,batch.size);
assertTrue(batch.selectedInUse);
assertEquals(1,batch.selected[0]);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Tests FilterStringColumnInList with the IN list {b, c} over a column of
 * the single-byte fixture values a, b, c (fields on the test class), then
 * the null, repeating, and repeating-null variants.
 */
@Test public void testFilterStringIn(){
int seed=17;
VectorizedRowBatch vrb=VectorizedRowGroupGenUtil.getVectorizedRowBatch(3,2,seed);
vrb.cols[0]=new BytesColumnVector();
BytesColumnVector bcv=(BytesColumnVector)vrb.cols[0];
bcv.initBuffer();
bcv.setVal(0,a,0,1);
bcv.setVal(1,b,0,1);
bcv.setVal(2,c,0,1);
VectorExpression expr=new FilterStringColumnInList(0);
byte[][] inList={b,c};
((FilterStringColumnInList)expr).setInListValues(inList);
// Rows 1 (b) and 2 (c) are in the list; row 0 (a) is not.
expr.evaluate(vrb);
assertEquals(2,vrb.size);
assertTrue(vrb.selectedInUse);
assertEquals(1,vrb.selected[0]);
assertEquals(2,vrb.selected[1]);
// With row 2 nulled, only row 1 matches.
vrb.selectedInUse=false;
vrb.size=3;
bcv.noNulls=false;
bcv.isNull[2]=true;
expr.evaluate(vrb);
assertEquals(1,vrb.size);
assertEquals(1,vrb.selected[0]);
assertTrue(vrb.selectedInUse);
// Repeating, no nulls: the repeated row-0 value (a) is not in the list,
// so every row is dropped.
vrb.selectedInUse=false;
vrb.size=3;
bcv.noNulls=true;
bcv.isRepeating=true;
expr.evaluate(vrb);
assertEquals(0,vrb.size);
// Repeating null: even though the underlying value is reset to b, the
// null flag wins and the batch empties.
vrb.selectedInUse=false;
vrb.size=3;
bcv.noNulls=false;
bcv.isRepeating=true;
bcv.isNull[0]=true;
bcv.setVal(0,b,0,1);
expr.evaluate(vrb);
assertEquals(0,vrb.size);
}
BooleanVerifier EqualityVerifier HybridVerifier
/**
 * This tests the template for Decimal Column-Column comparison filters,
 * called FilterDecimalColumnCompareColumn.txt. Only equal is tested for
 * multiple cases because the generated logic is identical for
 * <, >, <=, >=, == and !=.
 */
@Test public void testFilterDecimalColumnEqualDecimalColumn(){
VectorizedRowBatch b=getVectorizedRowBatch2DecimalCol();
VectorExpression expr=new FilterDecimalColEqualDecimalColumn(0,1);
// Base case: exactly one row (row 1) holds equal values in both columns.
expr.evaluate(b);
assertTrue(b.selectedInUse);
assertEquals(1,b.selected[0]);
assertEquals(1,b.size);
// Nulling the matching row on the left removes the only match.
b=getVectorizedRowBatch2DecimalCol();
b.cols[0].noNulls=false;
b.cols[0].isNull[1]=true;
expr.evaluate(b);
assertEquals(0,b.size);
// Repeating left column (row-0 value everywhere): no row matches.
b=getVectorizedRowBatch2DecimalCol();
b.cols[0].isRepeating=true;
expr.evaluate(b);
assertEquals(0,b.size);
// Repeating null on the left empties the batch outright.
b=getVectorizedRowBatch2DecimalCol();
b.cols[0].isRepeating=true;
b.cols[0].noNulls=false;
b.cols[0].isNull[0]=true;
expr.evaluate(b);
assertEquals(0,b.size);
// Nulls on different rows of each side still leave the one matching row.
b=getVectorizedRowBatch2DecimalCol();
b.cols[0].noNulls=false;
b.cols[0].isNull[0]=true;
b.cols[1].noNulls=false;
b.cols[1].isNull[2]=true;
expr.evaluate(b);
assertEquals(1,b.size);
// Both sides repeating their (unequal) row-0 values: nothing matches.
b=getVectorizedRowBatch2DecimalCol();
b.cols[0].isRepeating=true;
b.cols[1].isRepeating=true;
expr.evaluate(b);
assertEquals(0,b.size);
}
BooleanVerifier EqualityVerifier HybridVerifier
/**
 * This tests the template for Decimal Column-Scalar comparison filters,
 * called FilterDecimalColumnCompareScalar.txt. Only equal is tested for
 * multiple cases because the generated logic is identical for
 * <, >, <=, >=, == and !=.
 */
@Test public void testFilterDecimalColEqualDecimalScalar(){
VectorizedRowBatch b=getVectorizedRowBatch1DecimalCol();
HiveDecimal scalar=HiveDecimal.create("-3.30");
VectorExpression expr=new FilterDecimalColEqualDecimalScalar(0,scalar);
// Base case: exactly one row (row 1) equals -3.30.
expr.evaluate(b);
assertTrue(b.selectedInUse);
assertEquals(1,b.selected[0]);
assertEquals(1,b.size);
// Nulling the matching row removes the only match.
b=getVectorizedRowBatch1DecimalCol();
b.cols[0].noNulls=false;
b.cols[0].isNull[1]=true;
expr.evaluate(b);
assertEquals(0,b.size);
// Repeating row-0 value does not equal the scalar: nothing matches.
b=getVectorizedRowBatch1DecimalCol();
b.cols[0].isRepeating=true;
expr.evaluate(b);
assertEquals(0,b.size);
// Repeating null empties the batch.
b=getVectorizedRowBatch1DecimalCol();
b.cols[0].isRepeating=true;
b.cols[0].noNulls=false;
b.cols[0].isNull[0]=true;
expr.evaluate(b);
assertEquals(0,b.size);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Verifies FilterLongColumnNotBetween: only values outside the closed
 * range [10, 20] survive the filter.
 */
@Test public void testFilterLongNotBetween(){
VectorizedRowBatch batch=VectorizedRowGroupGenUtil.getVectorizedRowBatch(5,2,17);
LongColumnVector col=(LongColumnVector)batch.cols[0];
long[] values={5,20,17,15,10};
for (int i=0; i < values.length; i++) {
col.vector[i]=values[i];
}
VectorExpression filter=new FilterLongColumnNotBetween(0,10,20);
filter.evaluate(batch);
// Only row 0 (value 5) lies outside [10, 20].
assertEquals(1,batch.size);
assertTrue(batch.selectedInUse);
assertEquals(0,batch.selected[0]);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Verifies FilterDoubleColumnNotBetween: only values outside the closed
 * range [10, 20] survive the filter.
 */
@Test public void testFilterDoubleNotBetween(){
VectorizedRowBatch batch=VectorizedRowGroupGenUtil.getVectorizedRowBatch(5,2,17);
DoubleColumnVector col=new DoubleColumnVector();
batch.cols[0]=col;
double[] values={5,20,17,15,10};
for (int i=0; i < values.length; i++) {
col.vector[i]=values[i];
}
VectorExpression filter=new FilterDoubleColumnNotBetween(0,10,20);
filter.evaluate(batch);
// Only row 0 (value 5) lies outside [10, 20].
assertEquals(1,batch.size);
assertTrue(batch.selectedInUse);
assertEquals(0,batch.selected[0]);
}
EqualityVerifier
/**
 * Checks FilterLongColEqualLongScalar on generated data: with seed 23 the
 * test expects the value 46 to occur exactly once, at row 1.
 */
@Test public void testFilterLongColEqualLongScalar(){
VectorizedRowBatch batch=VectorizedRowGroupGenUtil.getVectorizedRowBatch(1024,1,23);
FilterLongColEqualLongScalar filter=new FilterLongColEqualLongScalar(0,46);
filter.evaluate(batch);
assertEquals(1,batch.size);
assertEquals(1,batch.selected[0]);
}
APIUtilityVerifier EqualityVerifier
/**
 * Uses FilterLongColumnBetween over nanosecond timestamp values: keeps rows
 * in [epoch, 2013-11-05]. Of the three rows, only the 1983 timestamp falls
 * inside (the 1963 value is negative, the 2099 value exceeds the bound).
 */
@Test public void testFilterTimestampBetween(){
VectorizedRowBatch batch=VectorizedRowGroupGenUtil.getVectorizedRowBatch(5,2,17);
LongColumnVector col=(LongColumnVector)batch.cols[0];
long lower=0;
long upper=TimestampUtils.getTimeNanoSec(Timestamp.valueOf("2013-11-05 00:00:00.000000000"));
col.vector[0]=TimestampUtils.getTimeNanoSec(Timestamp.valueOf("1963-11-06 00:00:00.000"));
col.vector[1]=TimestampUtils.getTimeNanoSec(Timestamp.valueOf("1983-11-06 00:00:00.000"));
col.vector[2]=TimestampUtils.getTimeNanoSec(Timestamp.valueOf("2099-11-06 00:00:00.000"));
batch.size=3;
VectorExpression filter=new FilterLongColumnBetween(0,lower,upper);
filter.evaluate(batch);
assertEquals(1,batch.size);
assertEquals(true,batch.selectedInUse);
assertEquals(1,batch.selected[0]);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Tests FilterStringColumnBetween over the single-byte fixture values
 * a, b, c with range [b, c], then the null, repeating, and repeating-null
 * variants of the input column.
 */
@Test public void testFilterStringBetween(){
int seed=17;
VectorizedRowBatch vrb=VectorizedRowGroupGenUtil.getVectorizedRowBatch(3,2,seed);
vrb.cols[0]=new BytesColumnVector();
BytesColumnVector bcv=(BytesColumnVector)vrb.cols[0];
bcv.initBuffer();
bcv.setVal(0,a,0,1);
bcv.setVal(1,b,0,1);
bcv.setVal(2,c,0,1);
VectorExpression expr=new FilterStringColumnBetween(0,b,c);
// Rows 1 (b) and 2 (c) are inside [b, c]; row 0 (a) is not.
expr.evaluate(vrb);
assertEquals(2,vrb.size);
assertTrue(vrb.selectedInUse);
assertEquals(1,vrb.selected[0]);
assertEquals(2,vrb.selected[1]);
// With row 2 nulled, only row 1 remains in range.
vrb.selectedInUse=false;
vrb.size=3;
bcv.noNulls=false;
bcv.isNull[2]=true;
expr.evaluate(vrb);
assertEquals(1,vrb.size);
assertEquals(1,vrb.selected[0]);
assertTrue(vrb.selectedInUse);
// Repeating, no nulls: the repeated row-0 value (a) is out of range,
// so every row is dropped.
vrb.selectedInUse=false;
vrb.size=3;
bcv.noNulls=true;
bcv.isRepeating=true;
expr.evaluate(vrb);
assertEquals(0,vrb.size);
// Repeating null: the value is reset to b, but the null flag wins and
// the batch empties.
vrb.selectedInUse=false;
vrb.size=3;
bcv.noNulls=false;
bcv.isRepeating=true;
bcv.isNull[0]=true;
bcv.setVal(0,b,0,1);
expr.evaluate(vrb);
assertEquals(0,vrb.size);
}
BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Spot check scalar greater-than col for decimal. (The remaining comparison
 * operators share the same template, so one operator suffices here.)
 */
@Test public void testFilterDecimalScalarGreaterThanColumn(){
HiveDecimal zero=HiveDecimal.create("0");
VectorizedRowBatch batch=getVectorizedRowBatch1DecimalCol();
VectorExpression filter=new FilterDecimalScalarGreaterDecimalColumn(zero,0);
filter.evaluate(batch);
// Exactly one row (row 1) is below zero.
assertEquals(1,batch.size);
assertTrue(batch.selectedInUse);
assertEquals(1,batch.selected[0]);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Checks FilterStringColumnNotBetween: of the single-byte fixture values
 * a, b, c, only a lies outside the closed range [b, c].
 */
@Test public void testFilterStringNotBetween(){
VectorizedRowBatch batch=VectorizedRowGroupGenUtil.getVectorizedRowBatch(3,2,17);
BytesColumnVector strings=new BytesColumnVector();
strings.initBuffer();
strings.setVal(0,a,0,1);
strings.setVal(1,b,0,1);
strings.setVal(2,c,0,1);
batch.cols[0]=strings;
VectorExpression filter=new FilterStringColumnNotBetween(0,b,c);
filter.evaluate(batch);
assertEquals(1,batch.size);
assertTrue(batch.selectedInUse);
assertEquals(0,batch.selected[0]);
}
BooleanVerifier EqualityVerifier HybridVerifier
/**
 * This tests the template for Decimal Scalar-Column comparison filters,
 * called FilterDecimalScalarCompareColumn.txt. Only equal is tested for
 * multiple cases because the generated logic is identical for
 * <, >, <=, >=, == and !=.
 */
@Test public void testFilterDecimalScalarEqualDecimalColumn(){
VectorizedRowBatch b=getVectorizedRowBatch1DecimalCol();
HiveDecimal scalar=HiveDecimal.create("-3.30");
VectorExpression expr=new FilterDecimalScalarEqualDecimalColumn(scalar,0);
// Base case: exactly one row (row 1) equals -3.30.
expr.evaluate(b);
assertTrue(b.selectedInUse);
assertEquals(1,b.selected[0]);
assertEquals(1,b.size);
// Nulling the matching row removes the only match.
b=getVectorizedRowBatch1DecimalCol();
b.cols[0].noNulls=false;
b.cols[0].isNull[1]=true;
expr.evaluate(b);
assertEquals(0,b.size);
// Repeating row-0 value does not equal the scalar: nothing matches.
b=getVectorizedRowBatch1DecimalCol();
b.cols[0].isRepeating=true;
expr.evaluate(b);
assertEquals(0,b.size);
// Repeating null empties the batch.
b=getVectorizedRowBatch1DecimalCol();
b.cols[0].isRepeating=true;
b.cols[0].noNulls=false;
b.cols[0].isNull[0]=true;
expr.evaluate(b);
assertEquals(0,b.size);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
/**
 * Tests FilterLongColumnBetween over [15, 17] for the plain case, a batch
 * with nulls, a pre-selected batch with nulls, and a repeating column.
 */
@Test public void testFilterLongBetween(){
int seed=17;
VectorizedRowBatch vrb=VectorizedRowGroupGenUtil.getVectorizedRowBatch(5,2,seed);
LongColumnVector lcv0=(LongColumnVector)vrb.cols[0];
VectorExpression expr1=new FilterLongColumnBetween(0,15,17);
lcv0.vector[0]=5;
lcv0.vector[1]=20;
lcv0.vector[2]=17;
lcv0.vector[3]=15;
lcv0.vector[4]=10;
// Plain case: rows 2 (17) and 3 (15) fall inside the closed range.
expr1.evaluate(vrb);
assertEquals(2,vrb.size);
assertTrue(vrb.selectedInUse);
assertEquals(2,vrb.selected[0]);
assertEquals(3,vrb.selected[1]);
// Same data with rows 0 and 2 nulled: only row 3 remains.
VectorizedRowBatch vrb1=VectorizedRowGroupGenUtil.getVectorizedRowBatch(5,2,seed);
lcv0=(LongColumnVector)vrb1.cols[0];
lcv0.vector[0]=5;
lcv0.vector[1]=20;
lcv0.vector[2]=17;
lcv0.vector[3]=15;
lcv0.vector[4]=10;
lcv0.noNulls=false;
lcv0.isNull[0]=true;
lcv0.isNull[2]=true;
expr1.evaluate(vrb1);
assertEquals(1,vrb1.size);
assertTrue(vrb1.selectedInUse);
assertEquals(3,vrb1.selected[0]);
// Pre-selected rows {1, 2, 4} with nulls at rows 0, 2, and 5: the
// surviving candidates (20 and 10) are both outside [15, 17].
VectorizedRowBatch vrb2=VectorizedRowGroupGenUtil.getVectorizedRowBatch(7,2,seed);
vrb2.selectedInUse=true;
vrb2.selected[0]=1;
vrb2.selected[1]=2;
vrb2.selected[2]=4;
vrb2.size=3;
lcv0=(LongColumnVector)vrb2.cols[0];
lcv0.vector[0]=5;
lcv0.vector[1]=20;
lcv0.vector[2]=17;
lcv0.vector[3]=15;
lcv0.vector[4]=10;
lcv0.vector[5]=19;
lcv0.vector[6]=21;
lcv0.noNulls=false;
lcv0.isNull[0]=true;
lcv0.isNull[2]=true;
lcv0.isNull[5]=true;
expr1.evaluate(vrb2);
assertEquals(0,vrb2.size);
// Repeating value 17 is in range, so the whole batch passes untouched
// and the selection vector stays unused.
VectorizedRowBatch vrb3=VectorizedRowGroupGenUtil.getVectorizedRowBatch(7,2,seed);
lcv0=(LongColumnVector)vrb3.cols[0];
lcv0.isRepeating=true;
lcv0.vector[0]=17;
lcv0.vector[1]=20;
lcv0.vector[2]=17;
lcv0.vector[3]=15;
lcv0.vector[4]=10;
expr1.evaluate(vrb3);
assertEquals(7,vrb3.size);
assertFalse(vrb3.selectedInUse);
assertTrue(lcv0.isRepeating);
// A repeating null value filters everything out.
lcv0.noNulls=false;
lcv0.vector[0]=17;
lcv0.isNull[0]=true;
expr1.evaluate(vrb3);
assertEquals(0,vrb3.size);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
/**
 * Tests the long IN filter VectorExpression (FilterLongColumnInList) with
 * the list {5, 20}: plain data, nulls, a pre-selected batch with nulls,
 * and a repeating column.
 */
@Test public void testFilterLongIn(){
int seed=17;
VectorizedRowBatch vrb=VectorizedRowGroupGenUtil.getVectorizedRowBatch(5,2,seed);
LongColumnVector lcv0=(LongColumnVector)vrb.cols[0];
long[] inList={5,20};
FilterLongColumnInList f=new FilterLongColumnInList(0);
f.setInListValues(inList);
VectorExpression expr1=f;
lcv0.vector[0]=5;
lcv0.vector[1]=20;
lcv0.vector[2]=17;
lcv0.vector[3]=15;
lcv0.vector[4]=10;
// Plain case: rows 0 (5) and 1 (20) are in the list.
expr1.evaluate(vrb);
assertEquals(2,vrb.size);
assertTrue(vrb.selectedInUse);
assertEquals(0,vrb.selected[0]);
assertEquals(1,vrb.selected[1]);
// With rows 0 and 2 nulled, only row 1 matches.
VectorizedRowBatch vrb1=VectorizedRowGroupGenUtil.getVectorizedRowBatch(5,2,seed);
lcv0=(LongColumnVector)vrb1.cols[0];
lcv0.vector[0]=5;
lcv0.vector[1]=20;
lcv0.vector[2]=17;
lcv0.vector[3]=15;
lcv0.vector[4]=10;
lcv0.noNulls=false;
lcv0.isNull[0]=true;
lcv0.isNull[2]=true;
expr1.evaluate(vrb1);
assertEquals(1,vrb1.size);
assertTrue(vrb1.selectedInUse);
assertEquals(1,vrb1.selected[0]);
// Pre-selected rows {1, 2, 4} with nulls at rows 0, 2, and 5: of the
// non-null candidates (20 and 10) only 20 is in the list.
VectorizedRowBatch vrb2=VectorizedRowGroupGenUtil.getVectorizedRowBatch(7,2,seed);
vrb2.selectedInUse=true;
vrb2.selected[0]=1;
vrb2.selected[1]=2;
vrb2.selected[2]=4;
vrb2.size=3;
lcv0=(LongColumnVector)vrb2.cols[0];
lcv0.vector[0]=5;
lcv0.vector[1]=20;
lcv0.vector[2]=17;
lcv0.vector[3]=15;
lcv0.vector[4]=10;
lcv0.vector[5]=19;
lcv0.vector[6]=21;
lcv0.noNulls=false;
lcv0.isNull[0]=true;
lcv0.isNull[2]=true;
lcv0.isNull[5]=true;
expr1.evaluate(vrb2);
assertEquals(1,vrb2.size);
assertEquals(1,vrb2.selected[0]);
// Repeating value 5 is in the list, so the whole batch passes and the
// selection vector stays unused.
VectorizedRowBatch vrb3=VectorizedRowGroupGenUtil.getVectorizedRowBatch(7,2,seed);
lcv0=(LongColumnVector)vrb3.cols[0];
lcv0.isRepeating=true;
lcv0.vector[0]=5;
lcv0.vector[1]=20;
lcv0.vector[2]=17;
lcv0.vector[3]=15;
lcv0.vector[4]=10;
expr1.evaluate(vrb3);
assertEquals(7,vrb3.size);
assertFalse(vrb3.selectedInUse);
assertTrue(lcv0.isRepeating);
// A repeating null value filters everything out.
lcv0.noNulls=false;
lcv0.vector[0]=5;
lcv0.isNull[0]=true;
expr1.evaluate(vrb3);
assertEquals(0,vrb3.size);
}
BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Spot check col greater-or-equal col for decimal. (The remaining
 * comparison operators share the same template.)
 */
@Test public void testFilterDecimalColGreaterEqualCol(){
VectorizedRowBatch batch=getVectorizedRowBatch2DecimalCol();
VectorExpression filter=new FilterDecimalColGreaterEqualDecimalColumn(0,1);
filter.evaluate(batch);
// Rows 0 and 1 satisfy col0 >= col1.
assertEquals(2,batch.size);
assertTrue(batch.selectedInUse);
assertEquals(0,batch.selected[0]);
assertEquals(1,batch.selected[1]);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Verifies FilterDoubleColumnBetween: of the five values, only 20 lies in
 * the closed range [20, 21].
 */
@Test public void testFilterDoubleBetween(){
VectorizedRowBatch batch=VectorizedRowGroupGenUtil.getVectorizedRowBatch(5,2,17);
DoubleColumnVector col=new DoubleColumnVector();
batch.cols[0]=col;
double[] values={5,20,17,15,10};
for (int i=0; i < values.length; i++) {
col.vector[i]=values[i];
}
VectorExpression filter=new FilterDoubleColumnBetween(0,20,21);
filter.evaluate(batch);
assertEquals(1,batch.size);
assertTrue(batch.selectedInUse);
assertEquals(1,batch.selected[0]);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Verifies FilterDoubleColumnInList with the list {5.0, 20.2}: rows 0 and 1
 * hold exactly those values, and the remaining rows do not.
 */
@Test public void testFilterDoubleIn(){
VectorizedRowBatch batch=VectorizedRowGroupGenUtil.getVectorizedRowBatch(5,2,17);
DoubleColumnVector col=new DoubleColumnVector();
batch.cols[0]=col;
double[] values={5.0,20.2,17.0,15.0,10.0};
for (int i=0; i < values.length; i++) {
col.vector[i]=values[i];
}
FilterDoubleColumnInList filter=new FilterDoubleColumnInList(0);
filter.setInListValues(new double[]{5.0,20.2});
filter.evaluate(batch);
assertEquals(2,batch.size);
assertTrue(batch.selectedInUse);
assertEquals(0,batch.selected[0]);
assertEquals(1,batch.selected[1]);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
/**
 * Tests FilterLongScalarLessLongColumn (and one FilterLongScalarGreater
 * case) for plain data, nulls, a pre-selected batch with nulls, and a
 * repeating column.
 */
@Test public void testFilterLongScalarLessLongColumn(){
int seed=17;
VectorizedRowBatch vrb=VectorizedRowGroupGenUtil.getVectorizedRowBatch(5,2,seed);
LongColumnVector lcv0=(LongColumnVector)vrb.cols[0];
FilterLongScalarLessLongColumn expr1=new FilterLongScalarLessLongColumn(15,0);
lcv0.vector[0]=5;
lcv0.vector[1]=20;
lcv0.vector[2]=17;
lcv0.vector[3]=15;
lcv0.vector[4]=10;
// 15 < value holds for rows 1 (20) and 2 (17).
expr1.evaluate(vrb);
assertEquals(2,vrb.size);
assertTrue(vrb.selectedInUse);
assertEquals(1,vrb.selected[0]);
assertEquals(2,vrb.selected[1]);
// Chained on the already-filtered rows {1, 2}: 18 > value holds only
// for row 2 (17).
FilterLongScalarGreaterLongColumn expr2=new FilterLongScalarGreaterLongColumn(18,0);
expr2.evaluate(vrb);
assertEquals(1,vrb.size);
assertTrue(vrb.selectedInUse);
assertEquals(2,vrb.selected[0]);
// Fresh batch with rows 0 and 2 nulled: only row 1 (20) passes 15 < value.
VectorizedRowBatch vrb1=VectorizedRowGroupGenUtil.getVectorizedRowBatch(5,2,seed);
lcv0=(LongColumnVector)vrb1.cols[0];
lcv0.vector[0]=5;
lcv0.vector[1]=20;
lcv0.vector[2]=17;
lcv0.vector[3]=15;
lcv0.vector[4]=10;
lcv0.noNulls=false;
lcv0.isNull[0]=true;
lcv0.isNull[2]=true;
expr1.evaluate(vrb1);
assertEquals(1,vrb1.size);
assertTrue(vrb1.selectedInUse);
assertEquals(1,vrb1.selected[0]);
// Pre-selected rows {1, 2, 4} with nulls at rows 0, 2, and 5: of the
// non-null candidates (20 and 10), only 20 satisfies 15 < value.
VectorizedRowBatch vrb2=VectorizedRowGroupGenUtil.getVectorizedRowBatch(7,2,seed);
vrb2.selectedInUse=true;
vrb2.selected[0]=1;
vrb2.selected[1]=2;
vrb2.selected[2]=4;
vrb2.size=3;
lcv0=(LongColumnVector)vrb2.cols[0];
lcv0.vector[0]=5;
lcv0.vector[1]=20;
lcv0.vector[2]=17;
lcv0.vector[3]=15;
lcv0.vector[4]=10;
lcv0.vector[5]=19;
lcv0.vector[6]=21;
lcv0.noNulls=false;
lcv0.isNull[0]=true;
lcv0.isNull[2]=true;
lcv0.isNull[5]=true;
expr1.evaluate(vrb2);
assertEquals(1,vrb2.size);
assertTrue(vrb2.selectedInUse);
assertEquals(1,vrb2.selected[0]);
// Repeating value 17 satisfies 15 < 17, so every row passes and the
// selection vector stays unused.
VectorizedRowBatch vrb3=VectorizedRowGroupGenUtil.getVectorizedRowBatch(7,2,seed);
lcv0=(LongColumnVector)vrb3.cols[0];
lcv0.isRepeating=true;
lcv0.vector[0]=17;
lcv0.vector[1]=20;
lcv0.vector[2]=17;
lcv0.vector[3]=15;
lcv0.vector[4]=10;
expr1.evaluate(vrb3);
assertEquals(7,vrb3.size);
assertFalse(vrb3.selectedInUse);
assertTrue(lcv0.isRepeating);
// A repeating null value filters everything out.
lcv0.noNulls=false;
lcv0.vector[0]=17;
lcv0.isNull[0]=true;
expr1.evaluate(vrb3);
assertEquals(0,vrb3.size);
}
Class: org.apache.hadoop.hive.ql.exec.vector.expressions.TestVectorGenericDateExpressions EqualityVerifier
/**
 * Tests VectorUDFDateDiffColScalar over all column/scalar type pairings
 * (with and without nulls), then verifies that an unparseable date string,
 * whether in the column or as the scalar, yields a null output.
 */
@Test public void testDateDiffColScalar(){
for ( VectorExpression.Type colType1 : dateTimestampStringTypes) {
for ( VectorExpression.Type scalarType2 : dateTimestampStringTypes) {
LongColumnVector date1=newRandomLongColumnVector(10000,size);
LongColumnVector output=new LongColumnVector(size);
VectorizedRowBatch batch=new VectorizedRowBatch(2,size);
batch.cols[0]=castTo(date1,colType1);
batch.cols[1]=output;
long scalar2=newRandom(1000);
validateDateDiff(batch,date1,scalar2,colType1,scalarType2);
// Re-validate with nulls sprinkled into the input column.
TestVectorizedRowBatch.addRandomNulls(date1);
batch.cols[0]=castTo(date1,colType1);
validateDateDiff(batch,date1,scalar2,colType1,scalarType2);
}
}
// StandardCharsets avoids the checked UnsupportedEncodingException that the
// String-based charset overload forced this test to silently swallow.
byte[] bytes="error".getBytes(java.nio.charset.StandardCharsets.UTF_8);
VectorizedRowBatch batch=new VectorizedRowBatch(2,1);
VectorExpression udf=new VectorUDFDateDiffColScalar(0,0,1);
udf.setInputTypes(VectorExpression.Type.TIMESTAMP,VectorExpression.Type.STRING);
batch.cols[0]=new BytesColumnVector(1);
batch.cols[1]=new LongColumnVector(1);
BytesColumnVector bcv=(BytesColumnVector)batch.cols[0];
bcv.vector[0]=bytes;
bcv.start[0]=0;
bcv.length[0]=bytes.length;
udf.evaluate(batch);
// A malformed date string in the column must produce a null result.
Assert.assertTrue(batch.cols[1].isNull[0]);
// A malformed date string passed as the scalar must also produce null.
udf=new VectorUDFDateDiffColScalar(0,bytes,1);
udf.setInputTypes(VectorExpression.Type.TIMESTAMP,VectorExpression.Type.STRING);
batch.cols[0]=new LongColumnVector(1);
batch.cols[1]=new LongColumnVector(1);
udf.evaluate(batch);
Assert.assertTrue(batch.cols[1].isNull[0]);
}
EqualityVerifier
/**
 * Tests the date UDF for each input column type (with and without nulls),
 * then verifies that an unparseable date string yields a null output row.
 */
@Test public void testDate(){
for ( VectorExpression.Type colType : dateTimestampStringTypes) {
LongColumnVector date=newRandomLongColumnVector(10000,size);
BytesColumnVector output=new BytesColumnVector(size);
VectorizedRowBatch batch=new VectorizedRowBatch(2,size);
batch.cols[0]=castTo(date,colType);
batch.cols[1]=output;
validateDate(batch,colType,date);
// Re-validate with nulls sprinkled into the input column.
TestVectorizedRowBatch.addRandomNulls(date);
batch.cols[0]=castTo(date,colType);
validateDate(batch,colType,date);
}
VectorExpression udf=new VectorUDFDateString(0,1);
udf.setInputTypes(VectorExpression.Type.STRING);
VectorizedRowBatch batch=new VectorizedRowBatch(2,1);
batch.cols[0]=new BytesColumnVector(1);
batch.cols[1]=new BytesColumnVector(1);
BytesColumnVector bcv=(BytesColumnVector)batch.cols[0];
// StandardCharsets avoids the checked UnsupportedEncodingException that the
// String-based charset overload forced this test to silently swallow.
byte[] bytes="error".getBytes(java.nio.charset.StandardCharsets.UTF_8);
bcv.vector[0]=bytes;
bcv.start[0]=0;
bcv.length[0]=bytes.length;
udf.evaluate(batch);
// The malformed date string must be marked null in the output column.
Assert.assertTrue(batch.cols[1].isNull[0]);
}
EqualityVerifier
/**
 * Tests VectorUDFDateDiffColCol over all column-type pairings (with nulls
 * added to either side), then verifies that an unparseable date string in
 * either input column yields a null output.
 */
@Test public void testDateDiffColCol(){
for ( VectorExpression.Type colType1 : dateTimestampStringTypes) {
for ( VectorExpression.Type colType2 : dateTimestampStringTypes) {
LongColumnVector date1=newRandomLongColumnVector(10000,size);
LongColumnVector date2=newRandomLongColumnVector(10000,size);
LongColumnVector output=new LongColumnVector(size);
VectorizedRowBatch batch=new VectorizedRowBatch(3,size);
batch.cols[0]=castTo(date1,colType1);
batch.cols[1]=castTo(date2,colType2);
batch.cols[2]=output;
validateDateDiff(batch,date1,date2,colType1,colType2);
// Re-validate with nulls added to the first, then both inputs.
TestVectorizedRowBatch.addRandomNulls(date1);
batch.cols[0]=castTo(date1,colType1);
validateDateDiff(batch,date1,date2,colType1,colType2);
TestVectorizedRowBatch.addRandomNulls(date2);
batch.cols[1]=castTo(date2,colType2);
validateDateDiff(batch,date1,date2,colType1,colType2);
}
}
VectorExpression udf=new VectorUDFDateDiffColCol(0,1,2);
VectorizedRowBatch batch=new VectorizedRowBatch(3,1);
BytesColumnVector bcv;
// StandardCharsets avoids the checked UnsupportedEncodingException that the
// String-based charset overload forced this test to silently swallow.
byte[] bytes="error".getBytes(java.nio.charset.StandardCharsets.UTF_8);
// Malformed string in the first (string-typed) input column.
udf.setInputTypes(VectorExpression.Type.STRING,VectorExpression.Type.TIMESTAMP);
batch.cols[0]=new BytesColumnVector(1);
batch.cols[1]=new LongColumnVector(1);
batch.cols[2]=new LongColumnVector(1);
bcv=(BytesColumnVector)batch.cols[0];
bcv.vector[0]=bytes;
bcv.start[0]=0;
bcv.length[0]=bytes.length;
udf.evaluate(batch);
Assert.assertTrue(batch.cols[2].isNull[0]);
// Malformed string in the second (string-typed) input column.
udf.setInputTypes(VectorExpression.Type.TIMESTAMP,VectorExpression.Type.STRING);
batch.cols[0]=new LongColumnVector(1);
batch.cols[1]=new BytesColumnVector(1);
batch.cols[2]=new LongColumnVector(1);
bcv=(BytesColumnVector)batch.cols[1];
bcv.vector[0]=bytes;
bcv.start[0]=0;
bcv.length[0]=bytes.length;
udf.evaluate(batch);
Assert.assertTrue(batch.cols[2].isNull[0]);
}
EqualityVerifier
/**
 * Tests VectorUDFDateAddScalarCol for every scalar type, then verifies that
 * an unparseable date-string scalar yields a null output.
 */
@Test public void testDateAddScalarCol(){
for ( VectorExpression.Type scalarType1 : dateTimestampStringTypes) testDateAddScalarCol(scalarType1,true);
// Construct directly with a UTF-8 literal; the previous try/catch around
// getBytes("UTF-8") left udf null (and a guaranteed NPE below) had the
// swallowed UnsupportedEncodingException ever been thrown.
VectorExpression udf=new VectorUDFDateAddScalarCol("error".getBytes(java.nio.charset.StandardCharsets.UTF_8),0,1);
udf.setInputTypes(VectorExpression.Type.STRING,VectorExpression.Type.TIMESTAMP);
VectorizedRowBatch batch=new VectorizedRowBatch(2,1);
batch.cols[0]=new LongColumnVector(1);
batch.cols[1]=new BytesColumnVector(1);
udf.evaluate(batch);
// The malformed scalar must make the output row null.
Assert.assertTrue(batch.cols[1].isNull[0]);
}
EqualityVerifier
/**
 * Tests VectorUDFDateAddColCol for every column type, then verifies that an
 * unparseable date string in the first column yields a null output.
 */
@Test public void testDateAddColCol(){
for ( VectorExpression.Type colType1 : dateTimestampStringTypes) testDateAddColCol(colType1,true);
VectorExpression udf=new VectorUDFDateAddColCol(0,1,2);
VectorizedRowBatch batch=new VectorizedRowBatch(3,1);
// StandardCharsets avoids the checked UnsupportedEncodingException that the
// String-based charset overload forced this test to silently swallow.
byte[] bytes="error".getBytes(java.nio.charset.StandardCharsets.UTF_8);
udf.setInputTypes(VectorExpression.Type.STRING,VectorExpression.Type.TIMESTAMP);
batch.cols[0]=new BytesColumnVector(1);
batch.cols[1]=new LongColumnVector(1);
batch.cols[2]=new BytesColumnVector(1);
BytesColumnVector bcv=(BytesColumnVector)batch.cols[0];
bcv.vector[0]=bytes;
bcv.start[0]=0;
bcv.length[0]=bytes.length;
udf.evaluate(batch);
// The malformed date string must make the output row null.
Assert.assertTrue(batch.cols[2].isNull[0]);
}
EqualityVerifier
/**
 * Tests CastStringToDate / to-date behavior for timestamp and string inputs
 * (with and without nulls), then verifies that an unparseable date string
 * yields a null output row.
 */
@Test public void testToDate(){
for ( VectorExpression.Type type : Arrays.asList(VectorExpression.Type.TIMESTAMP,VectorExpression.Type.STRING)) {
LongColumnVector date=newRandomLongColumnVector(10000,size);
LongColumnVector output=new LongColumnVector(size);
VectorizedRowBatch batch=new VectorizedRowBatch(2,size);
batch.cols[0]=castTo(date,type);
batch.cols[1]=output;
validateToDate(batch,type,date);
// Re-validate with nulls sprinkled into the input column.
TestVectorizedRowBatch.addRandomNulls(date);
batch.cols[0]=castTo(date,type);
validateToDate(batch,type,date);
}
VectorExpression udf=new CastStringToDate(0,1);
udf.setInputTypes(VectorExpression.Type.STRING);
VectorizedRowBatch batch=new VectorizedRowBatch(2,1);
batch.cols[0]=new BytesColumnVector(1);
batch.cols[1]=new LongColumnVector(1);
BytesColumnVector bcv=(BytesColumnVector)batch.cols[0];
// StandardCharsets avoids the checked UnsupportedEncodingException that the
// String-based charset overload forced this test to silently swallow.
byte[] bytes="error".getBytes(java.nio.charset.StandardCharsets.UTF_8);
bcv.vector[0]=bytes;
bcv.start[0]=0;
bcv.length[0]=bytes.length;
udf.evaluate(batch);
// The malformed date string must be marked null in the output column.
Assert.assertTrue(batch.cols[1].isNull[0]);
}
EqualityVerifier
/**
 * Tests date-subtract col/scalar for every column type (via the shared
 * add/sub helper with isPositive=false), then verifies that an unparseable
 * date string yields a null output.
 */
@Test public void testDateSubColScalar(){
for ( VectorExpression.Type colType1 : dateTimestampStringTypes) testDateAddColScalar(colType1,false);
VectorExpression udf=new VectorUDFDateSubColScalar(0,0,1);
udf.setInputTypes(VectorExpression.Type.STRING,VectorExpression.Type.TIMESTAMP);
VectorizedRowBatch batch=new VectorizedRowBatch(2,1);
batch.cols[0]=new BytesColumnVector(1);
batch.cols[1]=new BytesColumnVector(1);
BytesColumnVector bcv=(BytesColumnVector)batch.cols[0];
// StandardCharsets avoids the checked UnsupportedEncodingException that the
// String-based charset overload forced this test to silently swallow.
byte[] bytes="error".getBytes(java.nio.charset.StandardCharsets.UTF_8);
bcv.vector[0]=bytes;
bcv.start[0]=0;
bcv.length[0]=bytes.length;
udf.evaluate(batch);
// The malformed date string must make the output row null.
Assert.assertTrue(batch.cols[1].isNull[0]);
}
EqualityVerifier
/**
 * Tests date-subtract scalar/col for every scalar type (via the shared
 * add/sub helper with isPositive=false), then verifies that an unparseable
 * date-string scalar yields a null output.
 */
@Test public void testDateSubScalarCol(){
for ( VectorExpression.Type scalarType1 : dateTimestampStringTypes) testDateAddScalarCol(scalarType1,false);
// Construct directly with a UTF-8 literal; the previous try/catch around
// getBytes("UTF-8") left udf null (and a guaranteed NPE below) had the
// swallowed UnsupportedEncodingException ever been thrown.
VectorExpression udf=new VectorUDFDateSubScalarCol("error".getBytes(java.nio.charset.StandardCharsets.UTF_8),0,1);
udf.setInputTypes(VectorExpression.Type.STRING,VectorExpression.Type.TIMESTAMP);
VectorizedRowBatch batch=new VectorizedRowBatch(2,1);
batch.cols[0]=new LongColumnVector(1);
batch.cols[1]=new BytesColumnVector(1);
udf.evaluate(batch);
// The malformed scalar must make the output row null.
Assert.assertTrue(batch.cols[1].isNull[0]);
}
EqualityVerifier
/**
 * Tests VectorUDFDateAddColScalar for every column type, then verifies that
 * an unparseable date string in the column yields a null output.
 */
@Test public void testDateAddColScalar(){
for ( VectorExpression.Type colType1 : dateTimestampStringTypes) testDateAddColScalar(colType1,true);
VectorExpression udf=new VectorUDFDateAddColScalar(0,0,1);
udf.setInputTypes(VectorExpression.Type.STRING,VectorExpression.Type.TIMESTAMP);
VectorizedRowBatch batch=new VectorizedRowBatch(2,1);
batch.cols[0]=new BytesColumnVector(1);
batch.cols[1]=new BytesColumnVector(1);
BytesColumnVector bcv=(BytesColumnVector)batch.cols[0];
// StandardCharsets avoids the checked UnsupportedEncodingException that the
// String-based charset overload forced this test to silently swallow.
byte[] bytes="error".getBytes(java.nio.charset.StandardCharsets.UTF_8);
bcv.vector[0]=bytes;
bcv.start[0]=0;
bcv.length[0]=bytes.length;
udf.evaluate(batch);
// The malformed date string must make the output row null.
Assert.assertTrue(batch.cols[1].isNull[0]);
}
EqualityVerifier
/**
 * Tests VectorUDFDateSubColCol for every column type (via the shared
 * add/sub helper with isPositive=false), then verifies that an unparseable
 * date string in the first column yields a null output.
 */
@Test public void testDateSubColCol(){
for ( VectorExpression.Type colType1 : dateTimestampStringTypes) testDateAddColCol(colType1,false);
VectorExpression udf=new VectorUDFDateSubColCol(0,1,2);
VectorizedRowBatch batch=new VectorizedRowBatch(3,1);
// StandardCharsets avoids the checked UnsupportedEncodingException that the
// String-based charset overload forced this test to silently swallow.
byte[] bytes="error".getBytes(java.nio.charset.StandardCharsets.UTF_8);
udf.setInputTypes(VectorExpression.Type.STRING,VectorExpression.Type.TIMESTAMP);
batch.cols[0]=new BytesColumnVector(1);
batch.cols[1]=new LongColumnVector(1);
batch.cols[2]=new BytesColumnVector(1);
BytesColumnVector bcv=(BytesColumnVector)batch.cols[0];
bcv.vector[0]=bytes;
bcv.start[0]=0;
bcv.length[0]=bytes.length;
udf.evaluate(batch);
// The malformed date string must make the output row null.
Assert.assertTrue(batch.cols[2].isNull[0]);
}
EqualityVerifier
/**
 * Tests VectorUDFDateDiffScalarCol over all scalar/column type pairings
 * (with and without nulls), then verifies that an unparseable date string,
 * whether in the column or as the scalar, yields a null output.
 */
@Test public void testDateDiffScalarCol(){
for ( VectorExpression.Type scalarType1 : dateTimestampStringTypes) {
for ( VectorExpression.Type colType2 : dateTimestampStringTypes) {
LongColumnVector date2=newRandomLongColumnVector(10000,size);
LongColumnVector output=new LongColumnVector(size);
ColumnVector col2=castTo(date2,colType2);
VectorizedRowBatch batch=new VectorizedRowBatch(2,size);
batch.cols[0]=col2;
batch.cols[1]=output;
long scalar1=newRandom(1000);
validateDateDiff(batch,scalar1,scalarType1,colType2,date2);
// Re-validate with nulls sprinkled into the input column.
TestVectorizedRowBatch.addRandomNulls(date2);
batch.cols[0]=castTo(date2,colType2);
validateDateDiff(batch,scalar1,scalarType1,colType2,date2);
}
}
// StandardCharsets avoids the checked UnsupportedEncodingException that the
// String-based charset overload forced this test to silently swallow.
byte[] bytes="error".getBytes(java.nio.charset.StandardCharsets.UTF_8);
VectorizedRowBatch batch=new VectorizedRowBatch(2,1);
VectorExpression udf=new VectorUDFDateDiffScalarCol(0,0,1);
udf.setInputTypes(VectorExpression.Type.TIMESTAMP,VectorExpression.Type.STRING);
batch.cols[0]=new BytesColumnVector(1);
batch.cols[1]=new LongColumnVector(1);
BytesColumnVector bcv=(BytesColumnVector)batch.cols[0];
bcv.vector[0]=bytes;
bcv.start[0]=0;
bcv.length[0]=bytes.length;
udf.evaluate(batch);
// A malformed date string in the column must produce a null result.
Assert.assertTrue(batch.cols[1].isNull[0]);
// A malformed date string passed as the scalar must also produce null.
udf=new VectorUDFDateDiffScalarCol(bytes,0,1);
udf.setInputTypes(VectorExpression.Type.STRING,VectorExpression.Type.TIMESTAMP);
batch.cols[0]=new LongColumnVector(1);
batch.cols[1]=new LongColumnVector(1);
udf.evaluate(batch);
Assert.assertTrue(batch.cols[1].isNull[0]);
}
Class: org.apache.hadoop.hive.ql.exec.vector.expressions.TestVectorLogicalExpressions EqualityVerifier
/**
 * Regression test for OR-filter batch reuse: evaluate once, then clear the
 * selection vector, restore the batch to its full size, swap the first two
 * columns, and evaluate the same expression tree again. The second result
 * must reflect the swapped inputs, proving no stale internal state leaks
 * between invocations.
 */
@Test public void testFilterExprOrExprWithBatchReuse(){
VectorizedRowBatch batch1=getBatchThreeBooleanCols();
SelectColumnIsTrue expr1=new SelectColumnIsTrue(0);
SelectColumnIsFalse expr2=new SelectColumnIsFalse(1);
FilterExprOrExpr orExpr=new FilterExprOrExpr();
orExpr.setChildExpressions(new VectorExpression[]{expr1,expr2});
orExpr.evaluate(batch1);
// Wipe the selection left behind by the first evaluation.
for (int i=0; i < VectorizedRowBatch.DEFAULT_SIZE; i++) {
batch1.selected[i]=0;
}
batch1.size=BOOLEAN_COLUMN_TEST_SIZE;
batch1.selectedInUse=false;
// Swap columns 0 and 1 so the second pass sees different inputs.
ColumnVector tmp=batch1.cols[0];
batch1.cols[0]=batch1.cols[1];
batch1.cols[1]=tmp;
orExpr.evaluate(batch1);
assertEquals(5,batch1.size);
assertEquals(0,batch1.selected[0]);
assertEquals(1,batch1.selected[1]);
assertEquals(3,batch1.selected[2]);
assertEquals(5,batch1.selected[3]);
assertEquals(6,batch1.selected[4]);
}
BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
// Exercises IsNotNull(col 0 -> col 2) over the four combinations of the
// input's noNulls/isRepeating flags and checks both output values and flags.
@Test public void testIsNotNullExpr(){
VectorizedRowBatch batch=getBatchThreeBooleanCols();
IsNotNull expr=new IsNotNull(0,2);
LongColumnVector outCol=(LongColumnVector)batch.cols[2];
// Case 1: input has nulls, not repeating.
expr.evaluate(batch);
Assert.assertEquals(1,outCol.vector[0]);
Assert.assertEquals(0,outCol.vector[4]);
// IsNotNull always produces a non-null boolean result.
Assert.assertTrue(outCol.noNulls);
Assert.assertFalse(outCol.isRepeating);
// Case 2: input declared noNulls -> result is constant true, so the
// output may take the repeating fast path.
batch.cols[0].noNulls=true;
expr.evaluate(batch);
Assert.assertTrue(outCol.isRepeating);
Assert.assertTrue(outCol.noNulls);
Assert.assertEquals(1,outCol.vector[0]);
// Case 3: repeating input whose single entry is null -> repeating false.
batch=getBatchThreeBooleanCols();
outCol=(LongColumnVector)batch.cols[2];
batch.cols[0].isRepeating=true;
batch.cols[0].isNull[0]=true;
expr.evaluate(batch);
Assert.assertTrue(outCol.isRepeating);
Assert.assertEquals(0,outCol.vector[0]);
Assert.assertTrue(outCol.noNulls);
// Case 4: repeating input with noNulls -> repeating true.
batch=getBatchThreeBooleanCols();
outCol=(LongColumnVector)batch.cols[2];
batch.cols[0].isRepeating=true;
batch.cols[0].noNulls=true;
expr.evaluate(batch);
Assert.assertTrue(outCol.isRepeating);
Assert.assertTrue(outCol.noNulls);
Assert.assertEquals(1,outCol.vector[0]);
}
EqualityVerifier
// Checks the row indices selected by the two basic boolean filters:
// SelectColumnIsTrue on column 0 and SelectColumnIsFalse on column 1.
@Test public void testBooleanFiltersOnColumns(){
VectorizedRowBatch b=getBatchThreeBooleanCols();
SelectColumnIsTrue isTrue=new SelectColumnIsTrue(0);
isTrue.evaluate(b);
// Rows where column 0 is non-null true.
int[] expectedTrue={2,3,7};
assertEquals(expectedTrue.length,b.size);
for (int i=0; i < expectedTrue.length; i++) {
assertEquals(expectedTrue[i],b.selected[i]);
}
// Fresh batch: rows where column 1 is non-null false.
b=getBatchThreeBooleanCols();
SelectColumnIsFalse isFalse=new SelectColumnIsFalse(1);
isFalse.evaluate(b);
int[] expectedFalse={0,2,4};
assertEquals(expectedFalse.length,b.size);
for (int i=0; i < expectedFalse.length; i++) {
assertEquals(expectedFalse[i],b.selected[i]);
}
}
BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
// Exercises IsNull(col 0 -> col 2) over the four combinations of the
// input's noNulls/isRepeating flags; mirrors testIsNotNullExpr.
@Test public void testIsNullExpr(){
VectorizedRowBatch batch=getBatchThreeBooleanCols();
IsNull expr=new IsNull(0,2);
LongColumnVector outCol=(LongColumnVector)batch.cols[2];
// Case 1: input has nulls, not repeating.
expr.evaluate(batch);
Assert.assertEquals(0,outCol.vector[0]);
Assert.assertEquals(1,outCol.vector[4]);
// IsNull always produces a non-null boolean result.
Assert.assertTrue(outCol.noNulls);
Assert.assertFalse(outCol.isRepeating);
// Case 2: input declared noNulls -> result is constant false (repeating).
batch.cols[0].noNulls=true;
expr.evaluate(batch);
Assert.assertTrue(outCol.isRepeating);
Assert.assertTrue(outCol.noNulls);
Assert.assertEquals(0,outCol.vector[0]);
// Case 3: repeating input whose single entry is null -> repeating true.
batch=getBatchThreeBooleanCols();
outCol=(LongColumnVector)batch.cols[2];
batch.cols[0].isRepeating=true;
batch.cols[0].isNull[0]=true;
expr.evaluate(batch);
Assert.assertTrue(outCol.isRepeating);
Assert.assertEquals(1,outCol.vector[0]);
Assert.assertTrue(outCol.noNulls);
// Case 4: repeating input with noNulls -> repeating false.
batch=getBatchThreeBooleanCols();
outCol=(LongColumnVector)batch.cols[2];
batch.cols[0].isRepeating=true;
batch.cols[0].noNulls=true;
expr.evaluate(batch);
Assert.assertTrue(outCol.isRepeating);
Assert.assertTrue(outCol.noNulls);
Assert.assertEquals(0,outCol.vector[0]);
}
BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
// Exercises ColOrCol (three-valued OR of cols 0,1 into col 2) over nullable,
// noNulls, and repeating input combinations, checking values and flags.
// Fix: assertEquals(batch.size,9) had expected/actual reversed (JUnit
// convention is assertEquals(expected, actual)); now assertEquals(9, ...).
@Test public void testLongColOrLongCol(){
VectorizedRowBatch batch=getBatchThreeBooleanCols();
ColOrCol expr=new ColOrCol(0,1,2);
LongColumnVector outCol=(LongColumnVector)batch.cols[2];
// Case 1: both inputs nullable. Three-valued logic: null OR true == true
// (rows 3,5,7 below), null OR false/null == null (rows 4,6,8).
expr.evaluate(batch);
Assert.assertEquals(0,outCol.vector[0]);
Assert.assertEquals(1,outCol.vector[1]);
Assert.assertEquals(1,outCol.vector[2]);
Assert.assertEquals(1,outCol.vector[3]);
Assert.assertFalse(outCol.isNull[3]);
Assert.assertTrue(outCol.isNull[4]);
Assert.assertEquals(1,outCol.vector[5]);
Assert.assertTrue(outCol.isNull[6]);
Assert.assertEquals(1,outCol.vector[7]);
Assert.assertTrue(outCol.isNull[8]);
Assert.assertEquals(9,batch.size);
Assert.assertFalse(outCol.noNulls);
Assert.assertFalse(outCol.isRepeating);
// Case 2: both inputs noNulls -> output must be noNulls.
batch=getBatchThreeBooleanCols();
batch.cols[0].noNulls=true;
batch.cols[1].noNulls=true;
batch.cols[2].noNulls=false;
outCol=(LongColumnVector)batch.cols[2];
expr.evaluate(batch);
Assert.assertTrue(outCol.noNulls);
Assert.assertEquals(0,outCol.vector[0]);
Assert.assertEquals(1,outCol.vector[1]);
Assert.assertEquals(1,outCol.vector[2]);
Assert.assertEquals(1,outCol.vector[3]);
// Case 3: repeating left input, non-repeating right input; stale
// isRepeating on the output column must be cleared.
batch=getBatchThreeBooleanCols();
batch.cols[0].noNulls=true;
batch.cols[0].isRepeating=true;
batch.cols[1].noNulls=true;
batch.cols[1].isRepeating=false;
batch.cols[2].noNulls=false;
batch.cols[2].isRepeating=true;
outCol=(LongColumnVector)batch.cols[2];
expr.evaluate(batch);
Assert.assertFalse(outCol.isRepeating);
Assert.assertEquals(0,outCol.vector[0]);
Assert.assertEquals(1,outCol.vector[1]);
Assert.assertEquals(0,outCol.vector[2]);
Assert.assertEquals(1,outCol.vector[3]);
}
EqualityVerifier
// AND of (col 0 is true) and (col 1 is false): only row 2 satisfies both.
@Test public void testFilterExprAndExpr(){
VectorizedRowBatch batch=getBatchThreeBooleanCols();
FilterExprAndExpr conjunction=new FilterExprAndExpr();
VectorExpression[] children={new SelectColumnIsTrue(0),new SelectColumnIsFalse(1)};
conjunction.setChildExpressions(children);
conjunction.evaluate(batch);
assertEquals(1,batch.size);
assertEquals(2,batch.selected[0]);
}
IterativeVerifier EqualityVerifier
// Verifies FilterExprOrExpr: the OR of (col 0 is true) and (col 1 is false)
// selects the same rows on two identical batches, produces the expected
// selection, and is stable when re-applied to an already-filtered batch.
@Test public void testFilterExprOrExpr(){
VectorizedRowBatch batch1=getBatchThreeBooleanCols();
VectorizedRowBatch batch2=getBatchThreeBooleanCols();
SelectColumnIsTrue expr1=new SelectColumnIsTrue(0);
SelectColumnIsFalse expr2=new SelectColumnIsFalse(1);
FilterExprOrExpr orExpr=new FilterExprOrExpr();
orExpr.setChildExpressions(new VectorExpression[]{expr1,expr2});
orExpr.evaluate(batch1);
orExpr.evaluate(batch2);
// Identical inputs must yield identical selections and untouched data.
assertEquals(batch1.size,batch2.size);
for (int j=0; j < batch1.size; j++) {
assertEquals(batch1.selected[j],batch2.selected[j]);
int i=j;
assertEquals((((LongColumnVector)batch1.cols[0]).vector[i]),(((LongColumnVector)batch2.cols[0]).vector[i]));
}
// Expected union of the two child selections.
assertEquals(5,batch1.size);
assertEquals(0,batch1.selected[0]);
assertEquals(2,batch1.selected[1]);
assertEquals(3,batch1.selected[2]);
assertEquals(4,batch1.selected[3]);
assertEquals(7,batch1.selected[4]);
// Re-evaluating on the already-filtered batch must be idempotent.
orExpr.evaluate(batch1);
assertEquals(5,batch1.size);
assertEquals(0,batch1.selected[0]);
assertEquals(2,batch1.selected[1]);
assertEquals(3,batch1.selected[2]);
assertEquals(4,batch1.selected[3]);
assertEquals(7,batch1.selected[4]);
}
IterativeVerifier EqualityVerifier
// Exercises FilterExprOrExpr with three and four children over several
// child-selection shapes (select-all, select-none, single row, complements).
// Fix: expr2d was constructed but never used — the child array passed
// {expr1d, expr3d, expr3d}. Both are SelectColumnNothing(0), so behavior is
// unchanged; the array now uses expr2d as clearly intended.
@Test public void testFilterExprMultiOrExpr(){
// (a) select-all OR'ed with two "not expected to run" children: all rows kept.
VectorizedRowBatch batch1a=getBatchThreeBooleanCols();
SelectColumnAll expr1a=new SelectColumnAll(0);
SelectColumnNotExpected expr2a=new SelectColumnNotExpected(1);
SelectColumnNotExpected expr3a=new SelectColumnNotExpected(1);
FilterExprOrExpr orExpr=new FilterExprOrExpr();
orExpr.setChildExpressions(new VectorExpression[]{expr1a,expr2a,expr3a});
orExpr.evaluate(batch1a);
assertEquals(BOOLEAN_COLUMN_TEST_SIZE,batch1a.size);
for (int i=0; i < BOOLEAN_COLUMN_TEST_SIZE; i++) {
assertEquals(i,batch1a.selected[i]);
}
// (b) IsNotNull OR IsNull covers every row; the third child must not run.
VectorizedRowBatch batch1b=getBatchThreeBooleanCols();
SelectColumnIsNotNull expr1b=new SelectColumnIsNotNull(0);
SelectColumnIsNull expr2b=new SelectColumnIsNull(0);
SelectColumnNotExpected expr3b=new SelectColumnNotExpected(0);
FilterExprOrExpr orExpr2=new FilterExprOrExpr();
orExpr2.setChildExpressions(new VectorExpression[]{expr1b,expr2b,expr3b});
orExpr2.evaluate(batch1b);
assertEquals(BOOLEAN_COLUMN_TEST_SIZE,batch1b.size);
for (int i=0; i < BOOLEAN_COLUMN_TEST_SIZE; i++) {
assertEquals(i,batch1b.selected[i]);
}
// (c) Four children; a select-nothing child in the middle must not break
// the complete IsNotNull/IsNull coverage.
VectorizedRowBatch batch1c=getBatchThreeBooleanCols();
SelectColumnIsNotNull expr1c=new SelectColumnIsNotNull(0);
SelectColumnNothing expr2c=new SelectColumnNothing(0);
SelectColumnIsNull expr3c=new SelectColumnIsNull(0);
SelectColumnNotExpected expr4c=new SelectColumnNotExpected(0);
FilterExprOrExpr orExpr3=new FilterExprOrExpr();
orExpr3.setChildExpressions(new VectorExpression[]{expr1c,expr2c,expr3c,expr4c});
orExpr3.evaluate(batch1c);
assertEquals(BOOLEAN_COLUMN_TEST_SIZE,batch1c.size);
for (int i=0; i < BOOLEAN_COLUMN_TEST_SIZE; i++) {
assertEquals(i,batch1c.selected[i]);
}
// (d) Only the first child selects anything: result is its selection.
VectorizedRowBatch batch1d=getBatchThreeBooleanCols();
SelectColumnIsTrue expr1d=new SelectColumnIsTrue(0);
SelectColumnNothing expr2d=new SelectColumnNothing(0);
SelectColumnNothing expr3d=new SelectColumnNothing(0);
FilterExprOrExpr orExpr4=new FilterExprOrExpr();
orExpr4.setChildExpressions(new VectorExpression[]{expr1d,expr2d,expr3d});
orExpr4.evaluate(batch1d);
int[] expected4={2,3,7};
assertEquals(expected4.length,batch1d.size);
for (int i=0; i < expected4.length; i++) {
assertEquals(expected4[i],batch1d.selected[i]);
}
// (e) No child selects anything: empty result.
VectorizedRowBatch batch1e=getBatchThreeBooleanCols();
SelectColumnNothing expr1e=new SelectColumnNothing(0);
SelectColumnNothing expr2e=new SelectColumnNothing(0);
SelectColumnNothing expr3e=new SelectColumnNothing(0);
FilterExprOrExpr orExpr5=new FilterExprOrExpr();
orExpr5.setChildExpressions(new VectorExpression[]{expr1e,expr2e,expr3e});
orExpr5.evaluate(batch1e);
assertEquals(0,batch1e.size);
// (f) Single row selected by the first child only.
VectorizedRowBatch batch1f=getBatchThreeBooleanCols();
SelectColumnOne expr1f=new SelectColumnOne(0,4);
SelectColumnNothing expr2f=new SelectColumnNothing(0);
SelectColumnNothing expr3f=new SelectColumnNothing(0);
FilterExprOrExpr orExpr6=new FilterExprOrExpr();
orExpr6.setChildExpressions(new VectorExpression[]{expr1f,expr2f,expr3f});
orExpr6.evaluate(batch1f);
assertEquals(1,batch1f.size);
assertEquals(4,batch1f.selected[0]);
// (g) Single row selected by the middle child only.
VectorizedRowBatch batch1g=getBatchThreeBooleanCols();
SelectColumnNothing expr1g=new SelectColumnNothing(0);
SelectColumnOne expr2g=new SelectColumnOne(0,2);
SelectColumnNothing expr3g=new SelectColumnNothing(0);
FilterExprOrExpr orExpr7=new FilterExprOrExpr();
orExpr7.setChildExpressions(new VectorExpression[]{expr1g,expr2g,expr3g});
orExpr7.evaluate(batch1g);
assertEquals(1,batch1g.size);
assertEquals(2,batch1g.selected[0]);
}
EqualityVerifier
// Verifies FilterExprOrExpr when the batch already has a selection in use
// from a prior evaluation: the second pass filters within that selection.
@Test public void testFilterExprOrExprWithSelectInUse(){
VectorizedRowBatch batch1=getBatchThreeBooleanCols();
SelectColumnIsTrue expr1=new SelectColumnIsTrue(0);
SelectColumnIsFalse expr2=new SelectColumnIsFalse(1);
FilterExprOrExpr orExpr=new FilterExprOrExpr();
orExpr.setChildExpressions(new VectorExpression[]{expr1,expr2});
// First pass establishes a selection (selectedInUse).
orExpr.evaluate(batch1);
// Swap the input columns and force row 7 of the (new) column 1 to
// non-null false so the second pass produces a different selection.
ColumnVector tmp=batch1.cols[0];
batch1.cols[0]=batch1.cols[1];
batch1.cols[1]=tmp;
batch1.cols[1].isNull[7]=false;
((LongColumnVector)batch1.cols[1]).vector[7]=0;
orExpr.evaluate(batch1);
// Only rows 0, 3, 7 of the previous selection survive.
assertEquals(3,batch1.size);
assertEquals(0,batch1.selected[0]);
assertEquals(3,batch1.selected[1]);
assertEquals(7,batch1.selected[2]);
}
BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
// Exercises ColAndCol (three-valued AND of cols 0,1 into col 2) over
// nullable, noNulls, and repeating input combinations.
// Fix: assertEquals(batch.size,9) had expected/actual reversed (JUnit
// convention is assertEquals(expected, actual)); now assertEquals(9, ...).
@Test public void testLongColAndLongCol(){
VectorizedRowBatch batch=getBatchThreeBooleanCols();
ColAndCol expr=new ColAndCol(0,1,2);
LongColumnVector outCol=(LongColumnVector)batch.cols[2];
// Case 1: both inputs nullable. Three-valued logic: null AND false ==
// false (rows 4,6), null AND true/null == null (rows 5,7,8).
expr.evaluate(batch);
Assert.assertEquals(0,outCol.vector[0]);
Assert.assertEquals(0,outCol.vector[1]);
Assert.assertEquals(0,outCol.vector[2]);
Assert.assertEquals(1,outCol.vector[3]);
Assert.assertEquals(0,outCol.vector[4]);
Assert.assertFalse(outCol.isNull[4]);
Assert.assertTrue(outCol.isNull[5]);
Assert.assertEquals(0,outCol.vector[6]);
Assert.assertFalse(outCol.isNull[6]);
Assert.assertTrue(outCol.isNull[7]);
Assert.assertTrue(outCol.isNull[8]);
Assert.assertEquals(9,batch.size);
Assert.assertFalse(outCol.noNulls);
Assert.assertFalse(outCol.isRepeating);
// Case 2: both inputs noNulls -> output must be noNulls.
batch=getBatchThreeBooleanCols();
batch.cols[0].noNulls=true;
batch.cols[1].noNulls=true;
batch.cols[2].noNulls=false;
outCol=(LongColumnVector)batch.cols[2];
expr.evaluate(batch);
Assert.assertTrue(outCol.noNulls);
Assert.assertEquals(0,outCol.vector[0]);
Assert.assertEquals(0,outCol.vector[1]);
Assert.assertEquals(0,outCol.vector[2]);
Assert.assertEquals(1,outCol.vector[3]);
// Case 3: repeating-true left input (vector[0] forced to 1) with a
// non-repeating right input; stale isRepeating on output is cleared.
batch=getBatchThreeBooleanCols();
((LongColumnVector)batch.cols[0]).vector[0]=1;
batch.cols[0].noNulls=true;
batch.cols[0].isRepeating=true;
batch.cols[1].noNulls=true;
batch.cols[1].isRepeating=false;
batch.cols[2].noNulls=false;
batch.cols[2].isRepeating=true;
outCol=(LongColumnVector)batch.cols[2];
expr.evaluate(batch);
Assert.assertFalse(outCol.isRepeating);
Assert.assertEquals(0,outCol.vector[0]);
Assert.assertEquals(1,outCol.vector[1]);
Assert.assertEquals(0,outCol.vector[2]);
Assert.assertEquals(1,outCol.vector[3]);
}
EqualityVerifier PublicFieldVerifier
// Exercises LongColumnInList (col 0 IN {0, -2} -> col 1): plain values,
// a null input entry, and a repeating input.
@Test public void testLongInExpr(){
VectorizedRowBatch b=TestVectorMathFunctions.getVectorizedRowBatchLongInLongOut();
LongColumnVector outV=(LongColumnVector)b.cols[1];
long[] inVals=new long[2];
inVals[0]=0;
inVals[1]=-2;
LongColumnInList expr=new LongColumnInList(0,1);
expr.setInListValues(inVals);
expr.evaluate(b);
// Row 0 matches the IN list, row 1 does not.
assertEquals(1,outV.vector[0]);
assertEquals(0,outV.vector[1]);
// A null input entry must propagate to the output.
b.cols[0].noNulls=false;
b.cols[0].isNull[0]=true;
expr.evaluate(b);
assertEquals(true,!outV.noNulls && outV.isNull[0]);
assertEquals(0,outV.vector[1]);
// Repeating input must yield a repeating output.
b=TestVectorMathFunctions.getVectorizedRowBatchLongInLongOut();
outV=(LongColumnVector)b.cols[1];
b.cols[0].isRepeating=true;
expr.evaluate(b);
assertEquals(true,outV.isRepeating);
assertEquals(1,outV.vector[0]);
}
EqualityVerifier PublicFieldVerifier
// Exercises DoubleColumnInList (col 0 IN {-1.5, 30.0} -> col 1); mirrors
// testLongInExpr: plain values, a null entry, and a repeating input.
@Test public void testDoubleInExpr(){
VectorizedRowBatch b=TestVectorMathFunctions.getVectorizedRowBatchDoubleInLongOut();
LongColumnVector outV=(LongColumnVector)b.cols[1];
double[] inVals=new double[2];
inVals[0]=-1.5d;
inVals[1]=30d;
// Only the first two rows are exercised here.
b.size=2;
DoubleColumnInList expr=new DoubleColumnInList(0,1);
expr.setInListValues(inVals);
expr.evaluate(b);
// Row 0 matches the IN list, row 1 does not.
assertEquals(1,outV.vector[0]);
assertEquals(0,outV.vector[1]);
// A null input entry must propagate to the output.
b.cols[0].noNulls=false;
b.cols[0].isNull[0]=true;
expr.evaluate(b);
assertEquals(true,!outV.noNulls && outV.isNull[0]);
assertEquals(0,outV.vector[1]);
// Repeating input must yield a repeating output.
b=TestVectorMathFunctions.getVectorizedRowBatchDoubleInLongOut();
outV=(LongColumnVector)b.cols[1];
b.cols[0].isRepeating=true;
expr.evaluate(b);
assertEquals(true,outV.isRepeating);
assertEquals(1,outV.vector[0]);
}
BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
// Exercises NotCol (boolean NOT of col 0 into col 2) over nullable,
// noNulls, and repeating input combinations.
// Fix: the check following the vector[2] assertion re-tested isNull[0]
// (already asserted two lines earlier); the parallel structure indicates
// isNull[2] was intended, so the duplicate now checks index 2.
@Test public void testBooleanNot(){
VectorizedRowBatch batch=getBatchThreeBooleanCols();
NotCol expr=new NotCol(0,2);
LongColumnVector outCol=(LongColumnVector)batch.cols[2];
// Case 1: input has nulls; NOT flips values and propagates nulls.
expr.evaluate(batch);
Assert.assertFalse(outCol.isRepeating);
Assert.assertEquals(1,outCol.vector[0]);
Assert.assertFalse(outCol.isNull[0]);
Assert.assertEquals(0,outCol.vector[2]);
Assert.assertFalse(outCol.isNull[2]);
Assert.assertTrue(outCol.isNull[4]);
// Case 2: input declared noNulls -> output noNulls, values flipped.
batch.cols[0].noNulls=true;
expr.evaluate(batch);
Assert.assertFalse(outCol.isRepeating);
Assert.assertTrue(outCol.noNulls);
Assert.assertEquals(1,outCol.vector[0]);
Assert.assertEquals(0,outCol.vector[2]);
// Case 3: repeating input whose single entry is null -> repeating null.
batch=getBatchThreeBooleanCols();
outCol=(LongColumnVector)batch.cols[2];
batch.cols[0].isRepeating=true;
batch.cols[0].isNull[0]=true;
expr.evaluate(batch);
Assert.assertTrue(outCol.isRepeating);
Assert.assertTrue(outCol.isNull[0]);
// Case 4: repeating noNulls input -> repeating flipped value.
batch=getBatchThreeBooleanCols();
outCol=(LongColumnVector)batch.cols[2];
batch.cols[0].isRepeating=true;
batch.cols[0].noNulls=true;
expr.evaluate(batch);
Assert.assertTrue(outCol.isRepeating);
Assert.assertTrue(outCol.noNulls);
Assert.assertEquals(1,outCol.vector[0]);
}
EqualityVerifier PublicFieldVerifier
// Exercises the SelectColumnIsNull filter over the four combinations of
// the input column's noNulls/isRepeating flags.
@Test public void testSelectColumnIsNull(){
VectorizedRowBatch batch=getBatchThreeBooleanCols();
SelectColumnIsNull expr=new SelectColumnIsNull(0);
// Case 1: nullable, non-repeating -> selects exactly the null rows.
expr.evaluate(batch);
assertEquals(3,batch.size);
assertEquals(4,batch.selected[0]);
assertEquals(5,batch.selected[1]);
assertEquals(8,batch.selected[2]);
// Case 2: noNulls -> nothing selected.
batch=getBatchThreeBooleanCols();
batch.cols[0].noNulls=true;
expr.evaluate(batch);
Assert.assertEquals(0,batch.size);
// Case 3: repeating with a null entry -> every row selected (size kept).
batch=getBatchThreeBooleanCols();
batch.cols[0].isRepeating=true;
batch.cols[0].isNull[0]=true;
int initialSize=batch.size;
expr.evaluate(batch);
Assert.assertEquals(initialSize,batch.size);
// Case 4: repeating and noNulls -> nothing selected.
batch=getBatchThreeBooleanCols();
batch.cols[0].isRepeating=true;
batch.cols[0].noNulls=true;
expr.evaluate(batch);
Assert.assertEquals(0,batch.size);
}
EqualityVerifier PublicFieldVerifier
// Exercises the SelectColumnIsNotNull filter; the complement of
// testSelectColumnIsNull over the same four flag combinations.
@Test public void testSelectColumnIsNotNull(){
VectorizedRowBatch batch=getBatchThreeBooleanCols();
SelectColumnIsNotNull expr=new SelectColumnIsNotNull(0);
// Case 1: nullable, non-repeating -> selects exactly the non-null rows.
expr.evaluate(batch);
assertEquals(6,batch.size);
assertEquals(0,batch.selected[0]);
assertEquals(1,batch.selected[1]);
assertEquals(2,batch.selected[2]);
assertEquals(3,batch.selected[3]);
assertEquals(6,batch.selected[4]);
assertEquals(7,batch.selected[5]);
// Case 2: noNulls -> every row selected (size unchanged).
batch=getBatchThreeBooleanCols();
batch.cols[0].noNulls=true;
int initialSize=batch.size;
expr.evaluate(batch);
Assert.assertEquals(initialSize,batch.size);
// Case 3: repeating with a null entry -> nothing selected.
batch=getBatchThreeBooleanCols();
batch.cols[0].isRepeating=true;
batch.cols[0].isNull[0]=true;
expr.evaluate(batch);
Assert.assertEquals(0,batch.size);
// Case 4: repeating and noNulls -> every row selected.
batch=getBatchThreeBooleanCols();
batch.cols[0].isRepeating=true;
batch.cols[0].noNulls=true;
initialSize=batch.size;
expr.evaluate(batch);
Assert.assertEquals(initialSize,batch.size);
}
Class: org.apache.hadoop.hive.ql.exec.vector.expressions.TestVectorMathFunctions EqualityVerifier PublicFieldVerifier
// Vectorized cos(): spot-checks row 4 against Math.cos(0.5).
@Test public void testVectorCos(){
VectorizedRowBatch batch=getVectorizedRowBatchDoubleInDoubleOut();
DoubleColumnVector output=(DoubleColumnVector)batch.cols[1];
VectorExpression cosExpr=new FuncCosDoubleToDouble(0,1);
batch.cols[0].noNulls=true;
cosExpr.evaluate(batch);
Assert.assertEquals(Math.cos(0.5d),output.vector[4]);
}
EqualityVerifier PublicFieldVerifier
// Exercises round() over a double column: plain values, null propagation,
// and the repeating fast path with and without nulls.
@Test public void testVectorRound(){
VectorizedRowBatch b=getVectorizedRowBatchDoubleInDoubleOut();
VectorExpression expr=new FuncRoundDoubleToDouble(0,1);
DoubleColumnVector resultV=(DoubleColumnVector)b.cols[1];
b.cols[0].noNulls=true;
expr.evaluate(b);
// NOTE(review): two-arg assertEquals on doubles carries no delta — confirm
// the Assert class in use performs an exact comparison here.
Assert.assertEquals(-2d,resultV.vector[0]);
Assert.assertEquals(-1d,resultV.vector[1]);
Assert.assertEquals(0d,resultV.vector[2]);
Assert.assertEquals(0d,resultV.vector[3]);
Assert.assertEquals(1d,resultV.vector[4]);
Assert.assertEquals(1d,resultV.vector[5]);
Assert.assertEquals(2d,resultV.vector[6]);
// A null input entry must be mirrored in the output and clear noNulls.
b.cols[0].noNulls=false;
b.cols[0].isNull[3]=true;
resultV.noNulls=true;
expr.evaluate(b);
Assert.assertEquals(true,resultV.isNull[3]);
Assert.assertEquals(false,resultV.noNulls);
// Repeating input (with a null present) must produce repeating output.
b.cols[0].isRepeating=true;
resultV.isRepeating=false;
expr.evaluate(b);
Assert.assertEquals(-2d,resultV.vector[0]);
Assert.assertEquals(true,resultV.isRepeating);
// Repeating input without nulls: still repeating output.
resultV.isRepeating=false;
b.cols[0].noNulls=true;
expr.evaluate(b);
Assert.assertEquals(-2d,resultV.vector[0]);
Assert.assertEquals(true,resultV.isRepeating);
}
EqualityVerifier PublicFieldVerifier
// Vectorized asin(): spot-checks row 4 against Math.asin(0.5).
@Test public void testVectorASin(){
VectorizedRowBatch batch=getVectorizedRowBatchDoubleInDoubleOut();
DoubleColumnVector output=(DoubleColumnVector)batch.cols[1];
VectorExpression asinExpr=new FuncASinDoubleToDouble(0,1);
batch.cols[0].noNulls=true;
asinExpr.evaluate(batch);
Assert.assertEquals(Math.asin(0.5d),output.vector[4]);
}
EqualityVerifier PublicFieldVerifier
// Vectorized tan(): spot-checks row 4 against Math.tan(0.5).
@Test public void testVectorTan(){
VectorizedRowBatch batch=getVectorizedRowBatchDoubleInDoubleOut();
DoubleColumnVector output=(DoubleColumnVector)batch.cols[1];
VectorExpression tanExpr=new FuncTanDoubleToDouble(0,1);
batch.cols[0].noNulls=true;
tanExpr.evaluate(batch);
Assert.assertEquals(Math.tan(0.5d),output.vector[4]);
}
EqualityVerifier PublicFieldVerifier
// Vectorized log2() for both the double-input and long-input variants.
@Test public void testVectorLog2(){
// Double input: row 4 holds 0.5.
VectorizedRowBatch batch=getVectorizedRowBatchDoubleInDoubleOut();
DoubleColumnVector output=(DoubleColumnVector)batch.cols[1];
VectorExpression log2Expr=new FuncLog2DoubleToDouble(0,1);
batch.cols[0].noNulls=true;
log2Expr.evaluate(batch);
Assert.assertEquals(Math.log(0.5d) / Math.log(2),output.vector[4]);
// Long input: row 3 holds 1.
batch=getVectorizedRowBatchLongInDoubleOut();
output=(DoubleColumnVector)batch.cols[1];
log2Expr=new FuncLog2LongToDouble(0,1);
batch.cols[0].noNulls=true;
log2Expr.evaluate(batch);
Assert.assertEquals(Math.log(1) / Math.log(2),output.vector[3]);
}
EqualityVerifier PublicFieldVerifier
// Vectorized degrees(): spot-checks row 4 against Math.toDegrees(0.5).
@Test public void testVectorDegrees(){
VectorizedRowBatch batch=getVectorizedRowBatchDoubleInDoubleOut();
DoubleColumnVector output=(DoubleColumnVector)batch.cols[1];
VectorExpression degreesExpr=new FuncDegreesDoubleToDouble(0,1);
batch.cols[0].noNulls=true;
degreesExpr.evaluate(batch);
Assert.assertEquals(Math.toDegrees(0.5d),output.vector[4]);
}
EqualityVerifier PublicFieldVerifier
// Vectorized sqrt(): spot-checks row 4 against Math.sqrt(0.5).
@Test public void testVectorSqrt(){
VectorizedRowBatch batch=getVectorizedRowBatchDoubleInDoubleOut();
DoubleColumnVector output=(DoubleColumnVector)batch.cols[1];
VectorExpression sqrtExpr=new FuncSqrtDoubleToDouble(0,1);
batch.cols[0].noNulls=true;
sqrtExpr.evaluate(batch);
Assert.assertEquals(Math.sqrt(0.5d),output.vector[4]);
}
EqualityVerifier PublicFieldVerifier
// Vectorized floor() (double -> long): spot-checks rows 0 and 6.
@Test public void testVectorFloor(){
VectorizedRowBatch batch=getVectorizedRowBatchDoubleInLongOut();
LongColumnVector output=(LongColumnVector)batch.cols[1];
VectorExpression floorExpr=new FuncFloorDoubleToLong(0,1);
batch.cols[0].noNulls=true;
floorExpr.evaluate(batch);
Assert.assertEquals(-2,output.vector[0]);
Assert.assertEquals(1,output.vector[6]);
}
EqualityVerifier PublicFieldVerifier
// Vectorized hex(): numeric input via FuncHex (col 1) and string input
// via StringHex (col 0), each checked at row 1.
@Test public void testVectorHex(){
VectorizedRowBatch batch=getBatchForStringMath();
BytesColumnVector output=(BytesColumnVector)batch.cols[2];
batch.cols[1].noNulls=true;
VectorExpression hexExpr=new FuncHex(1,2);
hexExpr.evaluate(batch);
String actual=new String(output.vector[1],output.start[1],output.length[1]);
Assert.assertEquals("FF",actual);
// String variant on a fresh batch.
batch=getBatchForStringMath();
output=(BytesColumnVector)batch.cols[2];
batch.cols[0].noNulls=true;
hexExpr=new StringHex(0,2);
hexExpr.evaluate(batch);
actual=new String(output.vector[1],output.start[1],output.length[1]);
Assert.assertEquals("33323332",actual);
}
EqualityVerifier PublicFieldVerifier
// Vectorized exp(): spot-checks row 4 against Math.exp(0.5).
@Test public void testVectorExp(){
VectorizedRowBatch batch=getVectorizedRowBatchDoubleInDoubleOut();
DoubleColumnVector output=(DoubleColumnVector)batch.cols[1];
VectorExpression expExpr=new FuncExpDoubleToDouble(0,1);
batch.cols[0].noNulls=true;
expExpr.evaluate(batch);
Assert.assertEquals(Math.exp(0.5d),output.vector[4]);
}
EqualityVerifier PublicFieldVerifier
// Vectorized bin(): converts the long in column 1 to its binary string.
// Fix: noNulls was set on cols[0], but FuncBin(1,2) reads column 1 — the
// sibling testVectorHex sets cols[1] for the same input column. Setting the
// flag on the column actually read is what the test intends.
@Test public void testVectorBin(){
VectorizedRowBatch b=getBatchForStringMath();
BytesColumnVector resultV=(BytesColumnVector)b.cols[2];
b.cols[1].noNulls=true;
VectorExpression expr=new FuncBin(1,2);
expr.evaluate(b);
String s=new String(resultV.vector[1],resultV.start[1],resultV.length[1]);
Assert.assertEquals("11111111",s);
}
BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
// Exercises pmod() (positive modulo) for both the double and long variants,
// comparing against the ((x % m) + m) % m reference formula.
@Test public void testVectorPosMod(){
VectorizedRowBatch b=getVectorizedRowBatchDoubleInDoubleOut();
DoubleColumnVector inV=(DoubleColumnVector)b.cols[0];
DoubleColumnVector resultV=(DoubleColumnVector)b.cols[1];
b.cols[0].noNulls=true;
// Force a negative value so pmod differs from plain %.
inV.vector[4]=-4.0;
VectorExpression expr=new PosModDoubleToDouble(0,0.3d,1);
expr.evaluate(b);
// Floating-point compare within tolerance rather than exact equality.
Assert.assertTrue(equalsWithinTolerance(((-4.0d % 0.3d) + 0.3d) % 0.3d,resultV.vector[4]));
// Long variant: row 0 holds -2 per the helper's fixture.
b=getVectorizedRowBatchLongInLongOut();
LongColumnVector resV2=(LongColumnVector)b.cols[1];
b.cols[0].noNulls=true;
expr=new PosModLongToLong(0,3,1);
expr.evaluate(b);
Assert.assertEquals(((-2 % 3) + 3) % 3,resV2.vector[0]);
}
EqualityVerifier PublicFieldVerifier
// Vectorized radians(): spot-checks row 4 against Math.toRadians(0.5).
@Test public void testVectorRadians(){
VectorizedRowBatch batch=getVectorizedRowBatchDoubleInDoubleOut();
DoubleColumnVector output=(DoubleColumnVector)batch.cols[1];
VectorExpression radiansExpr=new FuncRadiansDoubleToDouble(0,1);
batch.cols[0].noNulls=true;
radiansExpr.evaluate(batch);
Assert.assertEquals(Math.toRadians(0.5d),output.vector[4]);
}
EqualityVerifier PublicFieldVerifier
// round(x, 4): row 7 must come out as 1.2346.
@Test public void testRoundToDecimalPlaces(){
VectorizedRowBatch batch=getVectorizedRowBatchDoubleInDoubleOut();
VectorExpression roundExpr=new RoundWithNumDigitsDoubleToDouble(0,4,1);
// The digit count is also pushed through the ISetLongArg interface.
((ISetLongArg)roundExpr).setArg(4);
roundExpr.evaluate(batch);
DoubleColumnVector output=(DoubleColumnVector)batch.cols[1];
Assert.assertEquals(1.2346d,output.vector[7]);
}
EqualityVerifier PublicFieldVerifier
// Vectorized ln() for both the double-input and long-input variants.
@Test public void testVectorLn(){
// Double input: row 4 holds 0.5.
VectorizedRowBatch batch=getVectorizedRowBatchDoubleInDoubleOut();
DoubleColumnVector output=(DoubleColumnVector)batch.cols[1];
VectorExpression lnExpr=new FuncLnDoubleToDouble(0,1);
batch.cols[0].noNulls=true;
lnExpr.evaluate(batch);
Assert.assertEquals(Math.log(0.5),output.vector[4]);
// Long input: row 4 holds 2.
batch=getVectorizedRowBatchLongInDoubleOut();
output=(DoubleColumnVector)batch.cols[1];
lnExpr=new FuncLnLongToDouble(0,1);
batch.cols[0].noNulls=true;
lnExpr.evaluate(batch);
Assert.assertEquals(Math.log(2),output.vector[4]);
}
BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
// Vectorized log10() for both the double-input and long-input variants.
@Test public void testVectorLog10(){
// Double input: row 4 holds 0.5; compared within tolerance.
VectorizedRowBatch batch=getVectorizedRowBatchDoubleInDoubleOut();
DoubleColumnVector output=(DoubleColumnVector)batch.cols[1];
VectorExpression log10Expr=new FuncLog10DoubleToDouble(0,1);
batch.cols[0].noNulls=true;
log10Expr.evaluate(batch);
Assert.assertTrue(equalsWithinTolerance(Math.log(0.5d) / Math.log(10),output.vector[4]));
// Long input: row 3 holds 1, so the expected value is exactly 0.
batch=getVectorizedRowBatchLongInDoubleOut();
output=(DoubleColumnVector)batch.cols[1];
log10Expr=new FuncLog10LongToDouble(0,1);
batch.cols[0].noNulls=true;
log10Expr.evaluate(batch);
Assert.assertEquals(Math.log(1) / Math.log(10),output.vector[3]);
}
EqualityVerifier PublicFieldVerifier
// Vectorized acos(): spot-checks row 4 against Math.acos(0.5).
@Test public void testVectorACos(){
VectorizedRowBatch batch=getVectorizedRowBatchDoubleInDoubleOut();
DoubleColumnVector output=(DoubleColumnVector)batch.cols[1];
VectorExpression acosExpr=new FuncACosDoubleToDouble(0,1);
batch.cols[0].noNulls=true;
acosExpr.evaluate(batch);
Assert.assertEquals(Math.acos(0.5d),output.vector[4]);
}
EqualityVerifier PublicFieldVerifier
// Vectorized abs() for both the double and long variants.
@Test public void testVectorAbs(){
// Double input: rows 0 and 4 hold -1.5 and 0.5.
VectorizedRowBatch batch=getVectorizedRowBatchDoubleInDoubleOut();
DoubleColumnVector output=(DoubleColumnVector)batch.cols[1];
VectorExpression absExpr=new FuncAbsDoubleToDouble(0,1);
batch.cols[0].noNulls=true;
absExpr.evaluate(batch);
Assert.assertEquals(1.5,output.vector[0]);
Assert.assertEquals(0.5,output.vector[4]);
// Long input: rows 0 and 1 both have magnitude 2.
batch=getVectorizedRowBatchLongInLongOut();
LongColumnVector longOutput=(LongColumnVector)batch.cols[1];
absExpr=new FuncAbsLongToLong(0,1);
batch.cols[0].noNulls=true;
absExpr.evaluate(batch);
Assert.assertEquals(2,longOutput.vector[0]);
Assert.assertEquals(2,longOutput.vector[1]);
}
EqualityVerifier PublicFieldVerifier
// Vectorized atan(): spot-checks row 4 against Math.atan(0.5).
@Test public void testVectorATan(){
VectorizedRowBatch batch=getVectorizedRowBatchDoubleInDoubleOut();
DoubleColumnVector output=(DoubleColumnVector)batch.cols[1];
VectorExpression atanExpr=new FuncATanDoubleToDouble(0,1);
batch.cols[0].noNulls=true;
atanExpr.evaluate(batch);
Assert.assertEquals(Math.atan(0.5d),output.vector[4]);
}
EqualityVerifier PublicFieldVerifier
// Vectorized ceil() (double -> long): spot-checks rows 0 and 6.
@Test public void testVectorCeil(){
VectorizedRowBatch batch=getVectorizedRowBatchDoubleInLongOut();
LongColumnVector output=(LongColumnVector)batch.cols[1];
VectorExpression ceilExpr=new FuncCeilDoubleToLong(0,1);
batch.cols[0].noNulls=true;
ceilExpr.evaluate(batch);
Assert.assertEquals(-1,output.vector[0]);
Assert.assertEquals(2,output.vector[6]);
}
EqualityVerifier PublicFieldVerifier
// Vectorized sin(): spot-checks row 4 against Math.sin(0.5).
@Test public void testVectorSin(){
VectorizedRowBatch batch=getVectorizedRowBatchDoubleInDoubleOut();
DoubleColumnVector output=(DoubleColumnVector)batch.cols[1];
VectorExpression sinExpr=new FuncSinDoubleToDouble(0,1);
batch.cols[0].noNulls=true;
sinExpr.evaluate(batch);
Assert.assertEquals(Math.sin(0.5d),output.vector[4]);
}
EqualityVerifier PublicFieldVerifier
// Vectorized sign() for both the double and long variants: row 0 is
// negative (-1.0 expected), row 4 is positive (1.0 expected).
@Test public void testVectorSign(){
VectorizedRowBatch batch=getVectorizedRowBatchDoubleInDoubleOut();
DoubleColumnVector output=(DoubleColumnVector)batch.cols[1];
VectorExpression signExpr=new FuncSignDoubleToDouble(0,1);
batch.cols[0].noNulls=true;
signExpr.evaluate(batch);
Assert.assertEquals(-1.0d,output.vector[0]);
Assert.assertEquals(1.0d,output.vector[4]);
// Long-input variant on a fresh batch.
batch=getVectorizedRowBatchLongInDoubleOut();
output=(DoubleColumnVector)batch.cols[1];
signExpr=new FuncSignLongToDouble(0,1);
batch.cols[0].noNulls=true;
signExpr.evaluate(batch);
Assert.assertEquals(-1.0d,output.vector[0]);
Assert.assertEquals(1.0d,output.vector[4]);
}
Class: org.apache.hadoop.hive.ql.exec.vector.expressions.TestVectorScalarColArithmetic IterativeVerifier BooleanVerifier EqualityVerifier HybridVerifier
// scalar - column with no nulls: every row i of the input is i*37, so the
// output must be 100 - i*37, with noNulls set and isRepeating clear.
@Test public void testLongScalarSubtractLongColNoNulls(){
VectorizedRowBatch batch=getVectorizedRowBatchSingleLongVector(VectorizedRowBatch.DEFAULT_SIZE);
LongScalarSubtractLongColumn subtract=new LongScalarSubtractLongColumn(100,0,1);
subtract.evaluate(batch);
LongColumnVector result=(LongColumnVector)batch.cols[1];
for (int row=0; row < VectorizedRowBatch.DEFAULT_SIZE; row++) {
Assert.assertEquals(100 - row * 37,result.vector[row]);
}
Assert.assertTrue(result.noNulls);
Assert.assertFalse(result.isRepeating);
}
EqualityVerifier PublicFieldVerifier
// LongColumnInList used as a projection (boolean-valued IN): row 0 is not
// in {20, 1000}, row 1 is.
@Test public void testBooleanValuedLongIn(){
VectorizedRowBatch batch=getBatch();
long[] inListValues={20,1000};
batch.size=2;
LongColumnInList inExpr=new LongColumnInList(0,1);
inExpr.setInListValues(inListValues);
inExpr.evaluate(batch);
LongColumnVector result=(LongColumnVector)batch.cols[1];
Assert.assertEquals(0,result.vector[0]);
Assert.assertEquals(1,result.vector[1]);
}
BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
// Exercises scalar - column when the input column is repeating, both
// with and without a null in the repeated entry.
@Test public void testLongScalarSubtractLongColWithRepeating(){
LongColumnVector in, out;
VectorizedRowBatch batch;
LongScalarSubtractLongColumn expr;
// Case 1: repeating non-null input -> repeating non-null output.
batch=getVectorizedRowBatchSingleLongVector(VectorizedRowBatch.DEFAULT_SIZE);
in=(LongColumnVector)batch.cols[0];
in.isRepeating=true;
out=(LongColumnVector)batch.cols[1];
// Pre-set the opposite flag value so the assertion proves evaluate()
// actually updated it.
out.isRepeating=false;
expr=new LongScalarSubtractLongColumn(100,0,1);
expr.evaluate(batch);
Assert.assertTrue(out.isRepeating);
Assert.assertTrue(out.noNulls);
Assert.assertEquals(out.vector[0],100 - 0 * 37);
// Case 2: repeating input whose single entry is null -> repeating null
// output; the null data entry must also be set to the canonical value.
batch=getVectorizedRowBatchSingleLongVector(VectorizedRowBatch.DEFAULT_SIZE);
in=(LongColumnVector)batch.cols[0];
in.isRepeating=true;
in.noNulls=false;
in.isNull[0]=true;
out=(LongColumnVector)batch.cols[1];
out.isRepeating=false;
out.isNull[0]=false;
out.noNulls=true;
expr=new LongScalarSubtractLongColumn(100,0,1);
expr.evaluate(batch);
Assert.assertTrue(out.isRepeating);
Assert.assertFalse(out.noNulls);
Assert.assertEquals(true,out.isNull[0]);
TestVectorArithmeticExpressions.verifyLongNullDataVectorEntries(out,batch.selected,batch.selectedInUse,batch.size);
}
IterativeVerifier BranchVerifier BooleanVerifier EqualityVerifier HybridVerifier
// Exercises scalar - column with randomly placed nulls in the input:
// non-null rows must hold 100 - i*37 and null rows must stay null.
@Test public void testLongScalarSubtractLongColWithNulls(){
VectorizedRowBatch batch=getVectorizedRowBatchSingleLongVector(VectorizedRowBatch.DEFAULT_SIZE);
LongColumnVector lcv=(LongColumnVector)batch.cols[0];
TestVectorizedRowBatch.addRandomNulls(lcv);
LongScalarSubtractLongColumn expr=new LongScalarSubtractLongColumn(100,0,1);
expr.evaluate(batch);
for (int i=0; i < VectorizedRowBatch.DEFAULT_SIZE; i++) {
if (!lcv.isNull[i]) {
Assert.assertEquals(100 - i * 37,((LongColumnVector)batch.cols[1]).vector[i]);
}
else {
// Null inputs must propagate to the output.
Assert.assertTrue(((LongColumnVector)batch.cols[1]).isNull[i]);
}
}
Assert.assertFalse(((LongColumnVector)batch.cols[1]).noNulls);
Assert.assertFalse(((LongColumnVector)batch.cols[1]).isRepeating);
// Null output slots must also hold the canonical null data value.
TestVectorArithmeticExpressions.verifyLongNullDataVectorEntries((LongColumnVector)batch.cols[1],batch.selected,batch.selectedInUse,batch.size);
}
IterativeVerifier BooleanVerifier EqualityVerifier HybridVerifier
// scalar % column with no nulls: input row i holds (i+1)*37 (positive,
// non-zero, so no divide-by-zero), expected output 100 % ((i+1)*37).
@Test public void testLongScalarModuloLongColNoNulls(){
VectorizedRowBatch batch=getBatchSingleLongVectorPositiveNonZero();
LongScalarModuloLongColumn modulo=new LongScalarModuloLongColumn(100,0,1);
modulo.evaluate(batch);
LongColumnVector result=(LongColumnVector)batch.cols[1];
for (int row=0; row < VectorizedRowBatch.DEFAULT_SIZE; row++) {
Assert.assertEquals(100 % ((row + 1) * 37),result.vector[row]);
}
Assert.assertTrue(result.noNulls);
Assert.assertFalse(result.isRepeating);
}
Class: org.apache.hadoop.hive.ql.exec.vector.expressions.TestVectorStringExpressions EqualityVerifier PublicFieldVerifier
// Exercises StringLTrim against the class's byte-array fixtures
// (emptyString, blanksLeft, blanksRight, blanksBoth, red, blankString):
// leading blanks are removed, trailing blanks are preserved.
@Test public void testVectorLTrim(){
VectorizedRowBatch b=makeTrimBatch();
VectorExpression expr=new StringLTrim(0,1);
expr.evaluate(b);
BytesColumnVector outV=(BytesColumnVector)b.cols[1];
// Each compare checks the output row against the expected slice of the
// fixture (offset/length pick the post-trim region).
Assert.assertEquals(0,StringExpr.compare(emptyString,0,0,outV.vector[0],0,0));
Assert.assertEquals(0,StringExpr.compare(blanksLeft,2,3,outV.vector[1],outV.start[1],outV.length[1]));
Assert.assertEquals(0,StringExpr.compare(blanksRight,0,5,outV.vector[2],outV.start[2],outV.length[2]));
Assert.assertEquals(0,StringExpr.compare(blanksBoth,2,5,outV.vector[3],outV.start[3],outV.length[3]));
Assert.assertEquals(0,StringExpr.compare(red,0,3,outV.vector[4],outV.start[4],outV.length[4]));
// An all-blank input trims down to the empty string.
Assert.assertEquals(0,StringExpr.compare(blankString,0,0,outV.vector[5],outV.start[5],outV.length[5]));
}
BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
@Test public void testStringColCompareStringColProjection(){
// Projection mode of StringGroupColLessStringGroupColumn: col0 < col1 is
// written into long column 3 (1 = true, 0 = false). Exercises the cross
// product of noNulls/isRepeating states on both input columns.
// Assumes makeStringBatchForColColCompare() yields 4 rows with a null in
// row 3 of each input column -- TODO confirm against the batch factory.
VectorizedRowBatch batch;
VectorExpression expr;
long[] outVector;
// Case 1: nulls possible on both inputs; row 3 stays null in the output.
batch=makeStringBatchForColColCompare();
expr=new StringGroupColLessStringGroupColumn(0,1,3);
expr.evaluate(batch);
Assert.assertEquals(4,batch.size);
outVector=((LongColumnVector)batch.cols[3]).vector;
Assert.assertFalse(batch.cols[3].isNull[0]);
Assert.assertEquals(1,outVector[0]);
Assert.assertFalse(batch.cols[3].isNull[1]);
Assert.assertEquals(0,outVector[1]);
Assert.assertFalse(batch.cols[3].isNull[2]);
Assert.assertEquals(0,outVector[2]);
Assert.assertTrue(batch.cols[3].isNull[3]);
// Case 2: both inputs declared null-free; output is fully populated and
// noNulls propagates.
batch=makeStringBatchForColColCompare();
batch.cols[0].noNulls=true;
batch.cols[1].noNulls=true;
expr.evaluate(batch);
Assert.assertEquals(4,batch.size);
outVector=((LongColumnVector)batch.cols[3]).vector;
Assert.assertTrue(batch.cols[3].noNulls);
Assert.assertFalse(batch.cols[3].isNull[0]);
Assert.assertEquals(1,outVector[0]);
Assert.assertFalse(batch.cols[3].isNull[1]);
Assert.assertEquals(0,outVector[1]);
Assert.assertFalse(batch.cols[3].isNull[2]);
Assert.assertEquals(0,outVector[2]);
Assert.assertFalse(batch.cols[3].isNull[3]);
Assert.assertEquals(1,outVector[3]);
// Case 3: only col1 null-free; col0's null in row 3 still nulls the output.
batch=makeStringBatchForColColCompare();
batch.cols[1].noNulls=true;
expr.evaluate(batch);
Assert.assertEquals(4,batch.size);
outVector=((LongColumnVector)batch.cols[3]).vector;
Assert.assertFalse(batch.cols[3].isNull[0]);
Assert.assertEquals(1,outVector[0]);
Assert.assertFalse(batch.cols[3].isNull[1]);
Assert.assertEquals(0,outVector[1]);
Assert.assertFalse(batch.cols[3].isNull[2]);
Assert.assertEquals(0,outVector[2]);
Assert.assertTrue(batch.cols[3].isNull[3]);
// Case 4: col0 null-free, col1 explicitly null at row 3.
batch=makeStringBatchForColColCompare();
batch.cols[0].noNulls=true;
batch.cols[1].isNull[3]=true;
expr.evaluate(batch);
Assert.assertEquals(4,batch.size);
outVector=((LongColumnVector)batch.cols[3]).vector;
Assert.assertFalse(batch.cols[3].noNulls);
Assert.assertFalse(batch.cols[3].isNull[0]);
Assert.assertEquals(1,outVector[0]);
Assert.assertFalse(batch.cols[3].isNull[1]);
Assert.assertEquals(0,outVector[1]);
Assert.assertFalse(batch.cols[3].isNull[2]);
Assert.assertEquals(0,outVector[2]);
Assert.assertTrue(batch.cols[3].isNull[3]);
// Case 5: col0 repeating (row 0's value compared against every col1 row).
batch=makeStringBatchForColColCompare();
batch.cols[0].isRepeating=true;
expr.evaluate(batch);
Assert.assertEquals(4,batch.size);
outVector=((LongColumnVector)batch.cols[3]).vector;
Assert.assertFalse(batch.cols[3].isNull[0]);
Assert.assertEquals(1,outVector[0]);
Assert.assertFalse(batch.cols[3].isNull[1]);
Assert.assertEquals(1,outVector[1]);
Assert.assertFalse(batch.cols[3].isNull[2]);
Assert.assertEquals(0,outVector[2]);
Assert.assertFalse(batch.cols[3].isNull[3]);
Assert.assertEquals(1,outVector[3]);
// Case 6: col1 repeating; col0's null at row 3 nulls the output there.
batch=makeStringBatchForColColCompare();
batch.cols[1].isRepeating=true;
expr.evaluate(batch);
Assert.assertEquals(4,batch.size);
outVector=((LongColumnVector)batch.cols[3]).vector;
Assert.assertFalse(batch.cols[3].noNulls);
Assert.assertFalse(batch.cols[3].isNull[0]);
Assert.assertEquals(1,outVector[0]);
Assert.assertFalse(batch.cols[3].isNull[1]);
Assert.assertEquals(1,outVector[1]);
Assert.assertFalse(batch.cols[3].isNull[2]);
Assert.assertEquals(0,outVector[2]);
Assert.assertTrue(batch.cols[3].isNull[3]);
// Case 7: both inputs repeating; the output is a single repeating value.
batch=makeStringBatchForColColCompare();
batch.cols[0].isRepeating=true;
batch.cols[1].isRepeating=true;
expr.evaluate(batch);
Assert.assertEquals(4,batch.size);
outVector=((LongColumnVector)batch.cols[3]).vector;
Assert.assertTrue(batch.cols[3].isRepeating);
Assert.assertFalse(batch.cols[3].isNull[0]);
Assert.assertEquals(1,outVector[0]);
// Case 8: col0 repeating (nulls possible), col1 null-free.
batch=makeStringBatchForColColCompare();
batch.cols[0].isRepeating=true;
batch.cols[1].noNulls=true;
expr.evaluate(batch);
outVector=((LongColumnVector)batch.cols[3]).vector;
Assert.assertEquals(4,batch.size);
Assert.assertFalse(batch.cols[3].noNulls);
Assert.assertFalse(batch.cols[3].isNull[0]);
Assert.assertEquals(1,outVector[0]);
Assert.assertFalse(batch.cols[3].isNull[1]);
Assert.assertEquals(1,outVector[1]);
Assert.assertFalse(batch.cols[3].isNull[2]);
Assert.assertEquals(0,outVector[2]);
Assert.assertFalse(batch.cols[3].isNull[3]);
Assert.assertEquals(1,outVector[3]);
// Case 9: col0 repeating AND null -> entire output is a repeating null.
batch=makeStringBatchForColColCompare();
batch.cols[0].isRepeating=true;
batch.cols[1].noNulls=true;
batch.cols[0].isNull[0]=true;
expr.evaluate(batch);
Assert.assertEquals(4,batch.size);
Assert.assertFalse(batch.cols[3].noNulls);
Assert.assertTrue(batch.cols[3].isRepeating);
Assert.assertTrue(batch.cols[3].isNull[0]);
// Case 10: col1 repeating and null-free; col0's row-3 null propagates.
batch=makeStringBatchForColColCompare();
batch.cols[1].isRepeating=true;
batch.cols[1].noNulls=true;
expr.evaluate(batch);
outVector=((LongColumnVector)batch.cols[3]).vector;
Assert.assertEquals(4,batch.size);
Assert.assertFalse(batch.cols[3].noNulls);
Assert.assertFalse(batch.cols[3].isNull[0]);
Assert.assertEquals(1,outVector[0]);
Assert.assertFalse(batch.cols[3].isNull[1]);
Assert.assertEquals(1,outVector[1]);
Assert.assertFalse(batch.cols[3].isNull[2]);
Assert.assertEquals(0,outVector[2]);
Assert.assertTrue(batch.cols[3].isNull[3]);
// Case 11: both repeating, col1 null-free -> single repeating result.
batch=makeStringBatchForColColCompare();
batch.cols[0].isRepeating=true;
batch.cols[1].isRepeating=true;
batch.cols[1].noNulls=true;
expr.evaluate(batch);
Assert.assertEquals(4,batch.size);
outVector=((LongColumnVector)batch.cols[3]).vector;
Assert.assertTrue(batch.cols[3].isRepeating);
Assert.assertFalse(batch.cols[3].isNull[0]);
Assert.assertEquals(1,outVector[0]);
// Case 12: col0 repeating/null-free, col1 has a null at row 0.
batch=makeStringBatchForColColCompare();
batch.cols[0].isRepeating=true;
batch.cols[0].noNulls=true;
batch.cols[1].isNull[0]=true;
expr.evaluate(batch);
outVector=((LongColumnVector)batch.cols[3]).vector;
Assert.assertEquals(4,batch.size);
Assert.assertFalse(batch.cols[3].noNulls);
Assert.assertTrue(batch.cols[3].isNull[0]);
Assert.assertFalse(batch.cols[3].isNull[1]);
Assert.assertEquals(1,outVector[1]);
Assert.assertFalse(batch.cols[3].isNull[2]);
Assert.assertEquals(0,outVector[2]);
Assert.assertFalse(batch.cols[3].isNull[3]);
Assert.assertEquals(1,outVector[3]);
// Case 13: col1 repeating, col0 null-free.
batch=makeStringBatchForColColCompare();
batch.cols[1].isRepeating=true;
batch.cols[0].noNulls=true;
expr.evaluate(batch);
outVector=((LongColumnVector)batch.cols[3]).vector;
Assert.assertEquals(4,batch.size);
Assert.assertFalse(batch.cols[3].noNulls);
Assert.assertFalse(batch.cols[3].isNull[0]);
Assert.assertEquals(1,outVector[0]);
Assert.assertFalse(batch.cols[3].isNull[1]);
Assert.assertEquals(1,outVector[1]);
Assert.assertFalse(batch.cols[3].isNull[2]);
Assert.assertEquals(0,outVector[2]);
Assert.assertFalse(batch.cols[3].isNull[3]);
Assert.assertEquals(1,outVector[3]);
// Case 14: col1 repeating AND null -> repeating null output.
batch=makeStringBatchForColColCompare();
batch.cols[1].isRepeating=true;
batch.cols[0].noNulls=true;
batch.cols[1].isNull[0]=true;
expr.evaluate(batch);
Assert.assertEquals(4,batch.size);
Assert.assertFalse(batch.cols[3].noNulls);
Assert.assertTrue(batch.cols[3].isRepeating);
Assert.assertTrue(batch.cols[3].isNull[0]);
// Case 15: both repeating, col0 null-free -> single repeating result.
batch=makeStringBatchForColColCompare();
batch.cols[0].isRepeating=true;
batch.cols[1].isRepeating=true;
batch.cols[0].noNulls=true;
expr.evaluate(batch);
Assert.assertEquals(4,batch.size);
outVector=((LongColumnVector)batch.cols[3]).vector;
Assert.assertTrue(batch.cols[3].isRepeating);
Assert.assertFalse(batch.cols[3].isNull[0]);
Assert.assertEquals(1,outVector[0]);
// Case 16: both repeating, col1 null at row 0 -> repeating null output.
batch=makeStringBatchForColColCompare();
batch.cols[0].isRepeating=true;
batch.cols[1].isRepeating=true;
batch.cols[0].noNulls=true;
batch.cols[1].isNull[0]=true;
expr.evaluate(batch);
Assert.assertEquals(4,batch.size);
Assert.assertFalse(batch.cols[3].noNulls);
Assert.assertTrue(batch.cols[3].isRepeating);
Assert.assertTrue(batch.cols[3].isNull[0]);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
@Test public void testColLower(){
  // StringLower over a mixed-case batch, checked under all four input
  // states: plain, noNulls, repeating, repeating + noNulls.
  VectorizedRowBatch batch=makeStringBatchMixedCase();
  StringLower expr=new StringLower(0,1);

  // Nulls possible, not repeating: row 2 stays null.
  expr.evaluate(batch);
  BytesColumnVector out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(mixedUpLower,0,mixedUpLower.length,out.vector[0],out.start[0],out.length[0]));
  Assert.assertTrue(out.isNull[2]);
  Assert.assertEquals(0,StringExpr.compare(green,0,green.length,out.vector[1],out.start[1],out.length[1]));

  // Null-free input: noNulls propagates to the output.
  batch=makeStringBatchMixedCase();
  batch.cols[0].noNulls=true;
  expr.evaluate(batch);
  out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(mixedUpLower,0,mixedUpLower.length,out.vector[0],out.start[0],out.length[0]));
  Assert.assertTrue(out.noNulls);

  // Repeating input (nulls possible).
  batch=makeStringBatchMixedCase();
  batch.cols[0].isRepeating=true;
  expr.evaluate(batch);
  out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(mixedUpLower,0,mixedUpLower.length,out.vector[0],out.start[0],out.length[0]));
  Assert.assertTrue(out.isRepeating);
  Assert.assertFalse(out.noNulls);

  // Repeating and null-free input.
  batch=makeStringBatchMixedCase();
  batch.cols[0].isRepeating=true;
  batch.cols[0].noNulls=true;
  expr.evaluate(batch);
  out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(mixedUpLower,0,mixedUpLower.length,out.vector[0],out.start[0],out.length[0]));
  Assert.assertTrue(out.isRepeating);
  Assert.assertTrue(out.noNulls);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
@Test public void testColConcatCharScalar(){
  // Concatenate string column 0 with the char scalar "red" into column 1,
  // covering each noNulls/isRepeating combination on the input column.
  VectorizedRowBatch batch=makeStringBatch();
  StringGroupColConcatCharScalar expr=new StringGroupColConcatCharScalar(0,new HiveChar(new String(red),6),1);

  // Nulls possible, not repeating: row 2's null carries through.
  expr.evaluate(batch);
  BytesColumnVector out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(redred,0,redred.length,out.vector[0],out.start[0],out.length[0]));
  Assert.assertTrue(out.isNull[2]);
  Assert.assertEquals(0,StringExpr.compare(greenred,0,greenred.length,out.vector[1],out.start[1],out.length[1]));
  Assert.assertFalse(out.noNulls);
  Assert.assertFalse(out.isRepeating);

  // Null-free input: all three rows produced, noNulls propagates.
  batch=makeStringBatch();
  batch.cols[0].noNulls=true;
  expr.evaluate(batch);
  out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(redred,0,redred.length,out.vector[0],out.start[0],out.length[0]));
  Assert.assertEquals(0,StringExpr.compare(greenred,0,greenred.length,out.vector[1],out.start[1],out.length[1]));
  Assert.assertEquals(0,StringExpr.compare(red,0,red.length,out.vector[2],out.start[2],out.length[2]));
  Assert.assertTrue(out.noNulls);
  Assert.assertFalse(out.isRepeating);

  // Repeating input (nulls possible): single repeating result.
  batch=makeStringBatch();
  batch.cols[0].isRepeating=true;
  expr.evaluate(batch);
  out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(redred,0,redred.length,out.vector[0],out.start[0],out.length[0]));
  Assert.assertTrue(out.isRepeating);
  Assert.assertFalse(out.noNulls);

  // Repeating and null-free input.
  batch=makeStringBatch();
  batch.cols[0].isRepeating=true;
  batch.cols[0].noNulls=true;
  expr.evaluate(batch);
  out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(redred,0,redred.length,out.vector[0],out.start[0],out.length[0]));
  Assert.assertTrue(out.isRepeating);
  Assert.assertTrue(out.noNulls);
}
EqualityVerifier PublicFieldVerifier
@Test public void testStringColCompareStringScalarProjection(){
  // Projected equality of a string column against byte[] scalars; the
  // result goes into long column 2 (1 = equal, 0 = not equal).
  VectorizedRowBatch batch=makeStringBatch();
  new StringGroupColEqualStringScalar(0,red2,2).evaluate(batch);
  Assert.assertEquals(3,batch.size);
  long[] out=((LongColumnVector)batch.cols[2]).vector;
  Assert.assertEquals(1,out[0]);
  Assert.assertEquals(0,out[1]);
  Assert.assertEquals(0,out[2]);

  // Compare against "green": only row 1 matches.
  batch=makeStringBatch();
  new StringGroupColEqualStringScalar(0,green,2).evaluate(batch);
  Assert.assertEquals(3,batch.size);
  out=((LongColumnVector)batch.cols[2]).vector;
  Assert.assertEquals(0,out[0]);
  Assert.assertEquals(1,out[1]);
  Assert.assertEquals(0,out[2]);
}
EqualityVerifier PublicFieldVerifier
@Test public void testVectorRTrim(){
  // StringRTrim must strip trailing blanks only; each row is checked
  // against the expected (offset, length) slice of its source bytes.
  VectorizedRowBatch batch=makeTrimBatch();
  new StringRTrim(0,1).evaluate(batch);
  BytesColumnVector out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(emptyString,0,0,out.vector[0],0,0));
  Assert.assertEquals(0,StringExpr.compare(blanksLeft,0,5,out.vector[1],out.start[1],out.length[1]));
  Assert.assertEquals(0,StringExpr.compare(blanksRight,0,3,out.vector[2],out.start[2],out.length[2]));
  Assert.assertEquals(0,StringExpr.compare(blanksBoth,0,5,out.vector[3],out.start[3],out.length[3]));
  Assert.assertEquals(0,StringExpr.compare(red,0,3,out.vector[4],out.start[4],out.length[4]));
  Assert.assertEquals(0,StringExpr.compare(blankString,0,0,out.vector[5],out.start[5],out.length[5]));
}
EqualityVerifier PublicFieldVerifier
@Test public void testCharScalarCompareStringColProjection(){
  // Projected equality of char scalars against a string column; the
  // result goes into long column 2 (1 = equal, 0 = not equal).
  VectorizedRowBatch batch=makeStringBatch();
  new CharScalarEqualStringGroupColumn(new HiveChar(new String(red2),8),0,2).evaluate(batch);
  Assert.assertEquals(3,batch.size);
  long[] out=((LongColumnVector)batch.cols[2]).vector;
  Assert.assertEquals(1,out[0]);
  Assert.assertEquals(0,out[1]);
  Assert.assertEquals(0,out[2]);

  // Compare against "green": only row 1 matches.
  batch=makeStringBatch();
  new CharScalarEqualStringGroupColumn(new HiveChar(new String(green),10),0,2).evaluate(batch);
  Assert.assertEquals(3,batch.size);
  out=((LongColumnVector)batch.cols[2]).vector;
  Assert.assertEquals(0,out[0]);
  Assert.assertEquals(1,out[1]);
  Assert.assertEquals(0,out[2]);
}
EqualityVerifier PublicFieldVerifier
@Test public void testStringColCompareCharScalarProjection(){
  // Projected equality of a string column against char scalars; the
  // result goes into long column 2 (1 = equal, 0 = not equal).
  VectorizedRowBatch batch=makeStringBatch();
  new StringGroupColEqualCharScalar(0,new HiveChar(new String(red2),8),2).evaluate(batch);
  Assert.assertEquals(3,batch.size);
  long[] out=((LongColumnVector)batch.cols[2]).vector;
  Assert.assertEquals(1,out[0]);
  Assert.assertEquals(0,out[1]);
  Assert.assertEquals(0,out[2]);

  // Compare against "green": only row 1 matches.
  batch=makeStringBatch();
  new StringGroupColEqualCharScalar(0,new HiveChar(new String(green),10),2).evaluate(batch);
  Assert.assertEquals(3,batch.size);
  out=((LongColumnVector)batch.cols[2]).vector;
  Assert.assertEquals(0,out[0]);
  Assert.assertEquals(1,out[1]);
  Assert.assertEquals(0,out[2]);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
@Test public void testColUpper(){
  // StringUpper over a null-free mixed-case column: row 0 uppercases
  // correctly and noNulls propagates to the output vector.
  VectorizedRowBatch batch=makeStringBatchMixedCase();
  batch.cols[0].noNulls=true;
  new StringUpper(0,1).evaluate(batch);
  BytesColumnVector out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(mixedUpUpper,0,mixedUpUpper.length,out.vector[0],out.start[0],out.length[0]));
  Assert.assertTrue(out.noNulls);
}
BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
@Test public void testSubstrStartLen() throws HiveException, UnsupportedEncodingException {
// Exercises StringSubstrColStartLen (substr with start + length) over
// positive/negative/zero start, length clipping, nulls, repeating input,
// and multi-byte (UTF-8) data. The output vector's stale isRepeating /
// noNulls flags are deliberately poisoned before each call to verify the
// expression resets them.
VectorizedRowBatch batch=new VectorizedRowBatch(2);
BytesColumnVector v=new BytesColumnVector();
batch.cols[0]=v;
BytesColumnVector outV=new BytesColumnVector();
batch.cols[1]=outV;
byte[] data1="abcd string".getBytes("UTF-8");
byte[] data2="efgh string".getBytes("UTF-8");
byte[] data3="efgh".getBytes("UTF-8");
batch.size=3;
v.noNulls=true;
v.setRef(0,data1,0,data1.length);
v.isNull[0]=false;
v.setRef(1,data2,0,data2.length);
v.isNull[1]=false;
v.setRef(2,data3,0,data3.length);
v.isNull[2]=false;
outV.isRepeating=true;
outV.noNulls=false;
// substr(col, 6, 6): yields "string" for the long rows, "" for "efgh".
StringSubstrColStartLen expr=new StringSubstrColStartLen(0,6,6,1);
expr.evaluate(batch);
BytesColumnVector outCol=(BytesColumnVector)batch.cols[1];
Assert.assertEquals(3,batch.size);
Assert.assertTrue(outCol.noNulls);
Assert.assertFalse(outCol.isRepeating);
byte[] expected="string".getBytes("UTF-8");
Assert.assertEquals(0,StringExpr.compare(expected,0,expected.length,outCol.vector[0],outCol.start[0],outCol.length[0]));
Assert.assertEquals(0,StringExpr.compare(expected,0,expected.length,outCol.vector[1],outCol.start[1],outCol.length[1]));
Assert.assertEquals(0,StringExpr.compare(emptyString,0,emptyString.length,outCol.vector[2],outCol.start[2],outCol.length[2]));
// substr(col, -6, 6): negative start counts from the end; same results.
outV.isRepeating=true;
outV.noNulls=false;
expr=new StringSubstrColStartLen(0,-6,6,1);
expr.evaluate(batch);
outCol=(BytesColumnVector)batch.cols[1];
Assert.assertTrue(outCol.noNulls);
Assert.assertFalse(outCol.isRepeating);
Assert.assertEquals(3,batch.size);
Assert.assertEquals(0,StringExpr.compare(expected,0,expected.length,outCol.vector[0],outCol.start[0],outCol.length[0]));
Assert.assertEquals(0,StringExpr.compare(expected,0,expected.length,outCol.vector[1],outCol.start[1],outCol.length[1]));
Assert.assertEquals(0,StringExpr.compare(emptyString,0,emptyString.length,outCol.vector[2],outCol.start[2],outCol.length[2]));
// substr(col, 1, 0): zero length yields an empty result for every row.
outV.isRepeating=true;
outV.noNulls=false;
expr=new StringSubstrColStartLen(0,1,0,1);
outCol=(BytesColumnVector)batch.cols[1];
expr.evaluate(batch);
Assert.assertEquals(3,batch.size);
Assert.assertTrue(outCol.noNulls);
Assert.assertFalse(outCol.isRepeating);
Assert.assertEquals(0,StringExpr.compare(data1,1,0,outCol.vector[0],outCol.start[0],outCol.length[0]));
Assert.assertEquals(0,StringExpr.compare(data2,1,0,outCol.vector[1],outCol.start[1],outCol.length[1]));
Assert.assertEquals(0,StringExpr.compare(data3,1,0,outCol.vector[2],outCol.start[2],outCol.length[2]));
// substr(col, 0, 11): returns the full string for each row.
outV.isRepeating=true;
outV.noNulls=false;
expr=new StringSubstrColStartLen(0,0,11,1);
outCol=(BytesColumnVector)batch.cols[1];
expr.evaluate(batch);
Assert.assertEquals(3,batch.size);
Assert.assertTrue(outCol.noNulls);
Assert.assertFalse(outCol.isRepeating);
Assert.assertEquals(0,StringExpr.compare(data1,0,data1.length,outCol.vector[0],outCol.start[0],outCol.length[0]));
Assert.assertEquals(0,StringExpr.compare(data2,0,data2.length,outCol.vector[1],outCol.start[1],outCol.length[1]));
Assert.assertEquals(0,StringExpr.compare(data3,0,data3.length,outCol.vector[2],outCol.start[2],outCol.length[2]));
// substr(col, 6, 10): length overshoots and is clipped at end-of-string.
outV.isRepeating=true;
outV.noNulls=false;
expr=new StringSubstrColStartLen(0,6,10,1);
expr.evaluate(batch);
outCol=(BytesColumnVector)batch.cols[1];
Assert.assertEquals(3,batch.size);
Assert.assertTrue(outCol.noNulls);
Assert.assertFalse(outCol.isRepeating);
Assert.assertEquals(0,StringExpr.compare(expected,0,expected.length,outCol.vector[0],outCol.start[0],outCol.length[0]));
Assert.assertEquals(0,StringExpr.compare(expected,0,expected.length,outCol.vector[1],outCol.start[1],outCol.length[1]));
Assert.assertEquals(0,StringExpr.compare(emptyString,0,emptyString.length,outCol.vector[2],outCol.start[2],outCol.length[2]));
// Null input in row 0: the null propagates and noNulls is cleared.
outV.isRepeating=true;
outV.noNulls=true;
v.noNulls=false;
v.isNull[0]=true;
expr.evaluate(batch);
Assert.assertEquals(3,batch.size);
Assert.assertFalse(outV.noNulls);
Assert.assertTrue(outV.isNull[0]);
Assert.assertFalse(outCol.isRepeating);
Assert.assertEquals(0,StringExpr.compare(expected,0,expected.length,outCol.vector[1],outCol.start[1],outCol.length[1]));
Assert.assertEquals(0,StringExpr.compare(emptyString,0,emptyString.length,outCol.vector[2],outCol.start[2],outCol.length[2]));
// Repeating, non-null input: output is a single repeating "string".
outV=new BytesColumnVector();
v=new BytesColumnVector();
outV.isRepeating=false;
outV.noNulls=true;
v.isRepeating=true;
v.noNulls=false;
v.setRef(0,data1,0,data1.length);
batch=new VectorizedRowBatch(2);
batch.cols[0]=v;
batch.cols[1]=outV;
expr.evaluate(batch);
outCol=(BytesColumnVector)batch.cols[1];
Assert.assertTrue(outCol.noNulls);
Assert.assertTrue(outCol.isRepeating);
Assert.assertEquals(0,StringExpr.compare(expected,0,expected.length,outCol.vector[0],outCol.start[0],outCol.length[0]));
// Multi-byte data, substr(col, 3, 2): result spans bytes 3..9 of the
// 10-byte UTF-8 buffer (characters, not bytes, drive the offsets).
v=new BytesColumnVector();
v.isRepeating=false;
v.noNulls=true;
batch.size=1;
v.setRef(0,multiByte,0,10);
batch.cols[0]=v;
batch.cols[1]=outV;
outV.isRepeating=true;
outV.noNulls=false;
expr=new StringSubstrColStartLen(0,3,2,1);
expr.evaluate(batch);
Assert.assertEquals(1,batch.size);
Assert.assertFalse(outV.isRepeating);
Assert.assertTrue(outV.noNulls);
Assert.assertEquals(0,StringExpr.compare(multiByte,3,10 - 3,outCol.vector[0],outCol.start[0],outCol.length[0]));
// Multi-byte data referenced at a non-zero buffer offset, substr(col, 2, 2).
v=new BytesColumnVector();
v.isRepeating=false;
v.noNulls=true;
outV=new BytesColumnVector();
batch.size=1;
v.setRef(0,multiByte,3,7);
batch.cols[0]=v;
batch.cols[1]=outV;
outV.isRepeating=true;
outV.noNulls=false;
expr=new StringSubstrColStartLen(0,2,2,1);
expr.evaluate(batch);
outCol=(BytesColumnVector)batch.cols[1];
Assert.assertEquals(1,batch.size);
Assert.assertFalse(outV.isRepeating);
Assert.assertTrue(outV.noNulls);
Assert.assertEquals(0,StringExpr.compare(multiByte,6,10 - 6,outCol.vector[0],outCol.start[0],outCol.length[0]));
}
APIUtilityVerifier BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
@Test public void testColConcatVarCharScalar(){
  // Concatenate string column 0 with the varchar scalar "red" into column 1,
  // covering each noNulls/isRepeating combination on the input column.
  VectorizedRowBatch batch=makeStringBatch();
  StringGroupColConcatVarCharScalar expr=new StringGroupColConcatVarCharScalar(0,new HiveVarchar(new String(red),14),1);

  // Nulls possible, not repeating: row 2's null carries through.
  expr.evaluate(batch);
  BytesColumnVector out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(redred,0,redred.length,out.vector[0],out.start[0],out.length[0]));
  Assert.assertTrue(out.isNull[2]);
  Assert.assertEquals(0,StringExpr.compare(greenred,0,greenred.length,out.vector[1],out.start[1],out.length[1]));
  Assert.assertFalse(out.noNulls);
  Assert.assertFalse(out.isRepeating);

  // Null-free input: all three rows produced, noNulls propagates.
  batch=makeStringBatch();
  batch.cols[0].noNulls=true;
  expr.evaluate(batch);
  out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(redred,0,redred.length,out.vector[0],out.start[0],out.length[0]));
  Assert.assertEquals(0,StringExpr.compare(greenred,0,greenred.length,out.vector[1],out.start[1],out.length[1]));
  Assert.assertEquals(0,StringExpr.compare(red,0,red.length,out.vector[2],out.start[2],out.length[2]));
  Assert.assertTrue(out.noNulls);
  Assert.assertFalse(out.isRepeating);

  // Repeating input (nulls possible): single repeating result.
  batch=makeStringBatch();
  batch.cols[0].isRepeating=true;
  expr.evaluate(batch);
  out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(redred,0,redred.length,out.vector[0],out.start[0],out.length[0]));
  Assert.assertTrue(out.isRepeating);
  Assert.assertFalse(out.noNulls);

  // Repeating and null-free input.
  batch=makeStringBatch();
  batch.cols[0].isRepeating=true;
  batch.cols[0].noNulls=true;
  expr.evaluate(batch);
  out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(redred,0,redred.length,out.vector[0],out.start[0],out.length[0]));
  Assert.assertTrue(out.isRepeating);
  Assert.assertTrue(out.noNulls);
}
EqualityVerifier PublicFieldVerifier
@Test public void testVarCharScalarCompareStringColProjection(){
  // Projected equality of varchar scalars against a string column; the
  // result goes into long column 2 (1 = equal, 0 = not equal).
  VectorizedRowBatch batch=makeStringBatch();
  new VarCharScalarEqualStringGroupColumn(new HiveVarchar(new String(red2),8),0,2).evaluate(batch);
  Assert.assertEquals(3,batch.size);
  long[] out=((LongColumnVector)batch.cols[2]).vector;
  Assert.assertEquals(1,out[0]);
  Assert.assertEquals(0,out[1]);
  Assert.assertEquals(0,out[2]);

  // Compare against "green": only row 1 matches.
  batch=makeStringBatch();
  new VarCharScalarEqualStringGroupColumn(new HiveVarchar(new String(green),10),0,2).evaluate(batch);
  Assert.assertEquals(3,batch.size);
  out=((LongColumnVector)batch.cols[2]).vector;
  Assert.assertEquals(0,out[0]);
  Assert.assertEquals(1,out[1]);
  Assert.assertEquals(0,out[2]);
}
BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Test the vectorized regex filter expression: rows whose value matches
 * "a.*" are kept, null rows are dropped.
 */
@Test public void testRegex() throws HiveException {
  VectorizedRowBatch batch=makeStringBatch();
  FilterStringColRegExpStringScalar expr=new FilterStringColRegExpStringScalar(0,"a.*".getBytes());
  batch.size=5;
  batch.selectedInUse=false;
  BytesColumnVector col=(BytesColumnVector)batch.cols[0];
  col.isRepeating=false;
  col.noNulls=false;
  // Row data: a null marker in slot 1; rows 0, 2 and 3 match the pattern.
  String[] rows={"4kMasVoB7lX1wc5i64bNk",null,"a27V63IL7jK3o","27V63IL7jK3oa","27V63IL7jK3o"};
  for (int i=0; i < rows.length; i++) {
    if (rows[i] == null) {
      col.isNull[i]=true;
      col.vector[i]=null;
    } else {
      col.isNull[i]=false;
      byte[] bytes=rows[i].getBytes();
      col.setRef(i,bytes,0,bytes.length);
    }
  }
  expr.evaluate(batch);
  // Matching rows 0, 2 and 3 survive the filter; the null row is dropped.
  Assert.assertTrue(batch.selectedInUse);
  Assert.assertEquals(3,batch.size);
  Assert.assertEquals(0,batch.selected[0]);
  Assert.assertEquals(2,batch.selected[1]);
  Assert.assertEquals(3,batch.selected[2]);
}
EqualityVerifier PublicFieldVerifier
@Test public void testStringLike() throws HiveException {
  // Filter rows matching the LIKE pattern mixPercentPattern across the
  // null/repeating input-vector states. Only row 0 of the mixed-char-size
  // batch matches the pattern.
  // Fix: removed the dead local `Text pattern` -- it was built from
  // mixPercentPattern but never used; the expression takes the raw
  // pattern directly.
  VectorizedRowBatch batch;
  int initialBatchSize;
  FilterStringColLikeStringScalar expr=new FilterStringColLikeStringScalar(0,mixPercentPattern);

  // Nulls possible, not repeating: single match at row 0.
  batch=makeStringBatchMixedCharSize();
  expr.evaluate(batch);
  Assert.assertEquals(1,batch.size);
  Assert.assertEquals(0,batch.selected[0]);

  // Null-free input: same single match.
  batch=makeStringBatchMixedCharSize();
  batch.cols[0].noNulls=true;
  expr.evaluate(batch);
  Assert.assertEquals(1,batch.size);
  Assert.assertEquals(0,batch.selected[0]);

  // Repeating matching value: every row passes the filter.
  batch=makeStringBatchMixedCharSize();
  initialBatchSize=batch.size;
  batch.cols[0].isRepeating=true;
  expr.evaluate(batch);
  Assert.assertEquals(initialBatchSize,batch.size);

  // Repeating null value: no rows pass.
  batch=makeStringBatchMixedCharSize();
  batch.cols[0].isRepeating=true;
  batch.cols[0].isNull[0]=true;
  expr.evaluate(batch);
  Assert.assertEquals(0,batch.size);

  // Repeating, null-free: every row passes.
  batch=makeStringBatchMixedCharSize();
  initialBatchSize=batch.size;
  batch.cols[0].isRepeating=true;
  batch.cols[0].noNulls=true;
  expr.evaluate(batch);
  Assert.assertEquals(initialBatchSize,batch.size);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
@Test public void testColConcatStringScalar(){
  // Concatenate string column 0 with the byte[] scalar "red" into column 1,
  // covering each noNulls/isRepeating combination on the input column.
  VectorizedRowBatch batch=makeStringBatch();
  StringGroupColConcatStringScalar expr=new StringGroupColConcatStringScalar(0,red,1);

  // Nulls possible, not repeating: row 2's null carries through.
  expr.evaluate(batch);
  BytesColumnVector out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(redred,0,redred.length,out.vector[0],out.start[0],out.length[0]));
  Assert.assertTrue(out.isNull[2]);
  Assert.assertEquals(0,StringExpr.compare(greenred,0,greenred.length,out.vector[1],out.start[1],out.length[1]));
  Assert.assertFalse(out.noNulls);
  Assert.assertFalse(out.isRepeating);

  // Null-free input: all three rows produced, noNulls propagates.
  batch=makeStringBatch();
  batch.cols[0].noNulls=true;
  expr.evaluate(batch);
  out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(redred,0,redred.length,out.vector[0],out.start[0],out.length[0]));
  Assert.assertEquals(0,StringExpr.compare(greenred,0,greenred.length,out.vector[1],out.start[1],out.length[1]));
  Assert.assertEquals(0,StringExpr.compare(red,0,red.length,out.vector[2],out.start[2],out.length[2]));
  Assert.assertTrue(out.noNulls);
  Assert.assertFalse(out.isRepeating);

  // Repeating input (nulls possible): single repeating result.
  batch=makeStringBatch();
  batch.cols[0].isRepeating=true;
  expr.evaluate(batch);
  out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(redred,0,redred.length,out.vector[0],out.start[0],out.length[0]));
  Assert.assertTrue(out.isRepeating);
  Assert.assertFalse(out.noNulls);

  // Repeating and null-free input.
  batch=makeStringBatch();
  batch.cols[0].isRepeating=true;
  batch.cols[0].noNulls=true;
  expr.evaluate(batch);
  out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(redred,0,redred.length,out.vector[0],out.start[0],out.length[0]));
  Assert.assertTrue(out.isRepeating);
  Assert.assertTrue(out.noNulls);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
@Test public void testCharScalarConcatCol(){
  // Concatenate the char scalar "red" with string column 0 into column 1
  // (scalar first), covering each noNulls/isRepeating input combination.
  VectorizedRowBatch batch=makeStringBatch();
  CharScalarConcatStringGroupCol expr=new CharScalarConcatStringGroupCol(new HiveChar(new String(red),6),0,1);

  // Nulls possible, not repeating: row 2's null carries through.
  expr.evaluate(batch);
  BytesColumnVector out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(redred,0,redred.length,out.vector[0],out.start[0],out.length[0]));
  Assert.assertTrue(out.isNull[2]);
  Assert.assertEquals(0,StringExpr.compare(redgreen,0,redgreen.length,out.vector[1],out.start[1],out.length[1]));
  Assert.assertFalse(out.noNulls);
  Assert.assertFalse(out.isRepeating);

  // Null-free input: all three rows produced, noNulls propagates.
  batch=makeStringBatch();
  batch.cols[0].noNulls=true;
  expr.evaluate(batch);
  out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(redred,0,redred.length,out.vector[0],out.start[0],out.length[0]));
  Assert.assertEquals(0,StringExpr.compare(redgreen,0,redgreen.length,out.vector[1],out.start[1],out.length[1]));
  Assert.assertEquals(0,StringExpr.compare(red,0,red.length,out.vector[2],out.start[2],out.length[2]));
  Assert.assertTrue(out.noNulls);
  Assert.assertFalse(out.isRepeating);

  // Repeating input (nulls possible): single repeating result.
  batch=makeStringBatch();
  batch.cols[0].isRepeating=true;
  expr.evaluate(batch);
  out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(redred,0,redred.length,out.vector[0],out.start[0],out.length[0]));
  Assert.assertTrue(out.isRepeating);
  Assert.assertFalse(out.noNulls);

  // Repeating and null-free input.
  batch=makeStringBatch();
  batch.cols[0].isRepeating=true;
  batch.cols[0].noNulls=true;
  expr.evaluate(batch);
  out=(BytesColumnVector)batch.cols[1];
  Assert.assertEquals(0,StringExpr.compare(redred,0,redred.length,out.vector[0],out.start[0],out.length[0]));
  Assert.assertTrue(out.isRepeating);
  Assert.assertTrue(out.noNulls);
}
EqualityVerifier PublicFieldVerifier
@Test public void testStringColCompareVarCharScalarProjection(){
VectorizedRowBatch batch=makeStringBatch();
VectorExpression expr;
expr=new StringGroupColEqualVarCharScalar(0,new HiveVarchar(new String(red2),8),2);
expr.evaluate(batch);
Assert.assertEquals(3,batch.size);
LongColumnVector outVector=(LongColumnVector)batch.cols[2];
Assert.assertEquals(1,outVector.vector[0]);
Assert.assertEquals(0,outVector.vector[1]);
Assert.assertEquals(0,outVector.vector[2]);
batch=makeStringBatch();
expr=new StringGroupColEqualVarCharScalar(0,new HiveVarchar(new String(green),10),2);
expr.evaluate(batch);
Assert.assertEquals(3,batch.size);
outVector=(LongColumnVector)batch.cols[2];
Assert.assertEquals(0,outVector.vector[0]);
Assert.assertEquals(1,outVector.vector[1]);
Assert.assertEquals(0,outVector.vector[2]);
}
BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
// Exercises StringSubstrColStart (SUBSTR with a start position only) over a
// 3-row batch: positive start, negative start, start == 1 identity, null
// propagation, repeating input, and multi-byte UTF-8 data.
@Test public void testSubstrStart() throws UnsupportedEncodingException {
VectorizedRowBatch batch=new VectorizedRowBatch(2);
BytesColumnVector v=new BytesColumnVector();
batch.cols[0]=v;
BytesColumnVector outV=new BytesColumnVector();
batch.cols[1]=outV;
byte[] data1="abcd string".getBytes("UTF-8");
byte[] data2="efgh string".getBytes("UTF-8");
byte[] data3="efgh".getBytes("UTF-8");
batch.size=3;
v.noNulls=true;
v.setRef(0,data1,0,data1.length);
v.isNull[0]=false;
v.setRef(1,data2,0,data2.length);
v.isNull[1]=false;
v.setRef(2,data3,0,data3.length);
v.isNull[2]=false;
// SUBSTR(col, 6): rows 0 and 1 yield "string"; row 2 ("efgh") is shorter than
// the start offset and yields the empty string.
StringSubstrColStart expr=new StringSubstrColStart(0,6,1);
expr.evaluate(batch);
BytesColumnVector outCol=(BytesColumnVector)batch.cols[1];
Assert.assertEquals(3,batch.size);
Assert.assertTrue(outCol.noNulls);
Assert.assertFalse(outCol.isRepeating);
byte[] expected="string".getBytes("UTF-8");
Assert.assertEquals(0,StringExpr.compare(expected,0,expected.length,outCol.vector[0],outCol.start[0],outCol.length[0]));
Assert.assertEquals(0,StringExpr.compare(expected,0,expected.length,outCol.vector[1],outCol.start[1],outCol.length[1]));
Assert.assertEquals(0,StringExpr.compare(emptyString,0,emptyString.length,outCol.vector[2],outCol.start[2],outCol.length[2]));
// Negative start counts from the end: -6 again selects "string" for rows 0/1
// and empty for the 4-char row 2. The output flags are deliberately dirtied
// first to prove evaluate() resets them.
outCol.noNulls=false;
outCol.isRepeating=true;
expr=new StringSubstrColStart(0,-6,1);
expr.evaluate(batch);
outCol=(BytesColumnVector)batch.cols[1];
Assert.assertEquals(3,batch.size);
Assert.assertTrue(outCol.noNulls);
Assert.assertFalse(outCol.isRepeating);
Assert.assertEquals(0,StringExpr.compare(expected,0,expected.length,outCol.vector[0],outCol.start[0],outCol.length[0]));
Assert.assertEquals(0,StringExpr.compare(expected,0,expected.length,outCol.vector[1],outCol.start[1],outCol.length[1]));
Assert.assertEquals(0,StringExpr.compare(emptyString,0,emptyString.length,outCol.vector[2],outCol.start[2],outCol.length[2]));
// start == 1 is the identity substring: every row equals its input.
outCol.noNulls=false;
outCol.isRepeating=true;
expr=new StringSubstrColStart(0,1,1);
expr.evaluate(batch);
Assert.assertEquals(3,batch.size);
Assert.assertTrue(outCol.noNulls);
Assert.assertFalse(outCol.isRepeating);
Assert.assertEquals(0,StringExpr.compare(data1,0,data1.length,outCol.vector[0],outCol.start[0],outCol.length[0]));
Assert.assertEquals(0,StringExpr.compare(data2,0,data2.length,outCol.vector[1],outCol.start[1],outCol.length[1]));
Assert.assertEquals(0,StringExpr.compare(data3,0,data3.length,outCol.vector[2],outCol.start[2],outCol.length[2]));
// A null input row must propagate as a null output row.
outV.noNulls=false;
outV.isRepeating=true;
expr=new StringSubstrColStart(0,6,1);
v.noNulls=false;
v.isNull[0]=true;
expr.evaluate(batch);
Assert.assertEquals(3,batch.size);
Assert.assertFalse(outV.noNulls);
Assert.assertTrue(outV.isNull[0]);
Assert.assertEquals(0,StringExpr.compare(expected,0,expected.length,outCol.vector[1],outCol.start[1],outCol.length[1]));
Assert.assertEquals(0,StringExpr.compare(emptyString,0,emptyString.length,outCol.vector[2],outCol.start[2],outCol.length[2]));
// Repeating, non-null input: the result is computed once and the output is
// marked repeating and noNulls.
outCol.noNulls=false;
outCol.isRepeating=false;
outV=new BytesColumnVector();
v=new BytesColumnVector();
v.isRepeating=true;
v.noNulls=true;
v.setRef(0,data1,0,data1.length);
batch=new VectorizedRowBatch(2);
batch.cols[0]=v;
batch.cols[1]=outV;
expr.evaluate(batch);
outCol=(BytesColumnVector)batch.cols[1];
expected="string".getBytes("UTF-8");
Assert.assertTrue(outV.isRepeating);
Assert.assertTrue(outV.noNulls);
Assert.assertEquals(0,StringExpr.compare(expected,0,expected.length,outCol.vector[0],outCol.start[0],outCol.length[0]));
// Multi-byte UTF-8 input from the shared multiByte fixture (first 10 bytes).
v=new BytesColumnVector();
v.isRepeating=false;
v.noNulls=true;
v.setRef(0,multiByte,0,10);
batch.cols[0]=v;
batch.cols[1]=outV;
outV.isRepeating=true;
outV.noNulls=false;
expr=new StringSubstrColStart(0,3,1);
batch.size=1;
expr.evaluate(batch);
outCol=(BytesColumnVector)batch.cols[1];
Assert.assertFalse(outV.isRepeating);
Assert.assertTrue(outV.noNulls);
Assert.assertEquals(0,StringExpr.compare(multiByte,3,10 - 3,outCol.vector[0],outCol.start[0],outCol.length[0]));
// Same, with a non-zero byte offset into the multi-byte buffer.
v=new BytesColumnVector();
v.isRepeating=false;
v.noNulls=true;
v.setRef(0,multiByte,3,7);
batch.cols[0]=v;
batch.cols[1]=outV;
outV.isRepeating=true;
outV.noNulls=false;
outCol=(BytesColumnVector)batch.cols[1];
expr=new StringSubstrColStart(0,2,1);
expr.evaluate(batch);
Assert.assertFalse(outV.isRepeating);
Assert.assertTrue(outV.noNulls);
Assert.assertEquals(0,StringExpr.compare(multiByte,6,4,outCol.vector[0],outCol.start[0],outCol.length[0]));
}
EqualityVerifier
// Filter-mode column < column comparison (FilterStringGroupColLessStringGroupColumn):
// walks through the permutations of noNulls / isRepeating / explicit isNull
// on both input columns and checks the surviving row set after each filter.
// Each scenario starts from a fresh fixture batch; the flags set below are
// the only differences between scenarios.
@Test public void testStringColCompareStringColFilter(){
VectorizedRowBatch batch;
VectorExpression expr;
// Defaults (nulls possible in both columns): one row survives.
batch=makeStringBatchForColColCompare();
expr=new FilterStringGroupColLessStringGroupColumn(0,1);
expr.evaluate(batch);
Assert.assertEquals(1,batch.size);
Assert.assertEquals(0,batch.selected[0]);
// Both columns noNulls: an extra row (index 3) now qualifies.
batch=makeStringBatchForColColCompare();
batch.cols[0].noNulls=true;
batch.cols[1].noNulls=true;
expr.evaluate(batch);
Assert.assertEquals(2,batch.size);
Assert.assertEquals(3,batch.selected[1]);
// Only the right column noNulls.
batch=makeStringBatchForColColCompare();
batch.cols[1].noNulls=true;
expr.evaluate(batch);
Assert.assertEquals(1,batch.size);
Assert.assertEquals(0,batch.selected[0]);
// Left noNulls, right has a null at row 3: that row is filtered out.
batch=makeStringBatchForColColCompare();
batch.cols[0].noNulls=true;
batch.cols[1].isNull[3]=true;
expr.evaluate(batch);
Assert.assertEquals(1,batch.size);
Assert.assertEquals(0,batch.selected[0]);
// Left column repeating.
batch=makeStringBatchForColColCompare();
batch.cols[0].isRepeating=true;
expr.evaluate(batch);
Assert.assertEquals(3,batch.size);
Assert.assertEquals(3,batch.selected[2]);
// Right column repeating.
batch=makeStringBatchForColColCompare();
batch.cols[1].isRepeating=true;
expr.evaluate(batch);
Assert.assertEquals(2,batch.size);
Assert.assertEquals(1,batch.selected[1]);
// Both repeating and row 0 satisfies the predicate: all rows pass.
batch=makeStringBatchForColColCompare();
batch.cols[0].isRepeating=true;
batch.cols[1].isRepeating=true;
expr.evaluate(batch);
Assert.assertEquals(4,batch.size);
// Left repeating, right noNulls.
batch=makeStringBatchForColColCompare();
batch.cols[0].isRepeating=true;
batch.cols[1].noNulls=true;
expr.evaluate(batch);
Assert.assertEquals(3,batch.size);
Assert.assertEquals(3,batch.selected[2]);
// Left repeating AND null: the whole column is null, nothing passes.
batch=makeStringBatchForColColCompare();
batch.cols[0].isRepeating=true;
batch.cols[1].noNulls=true;
batch.cols[0].isNull[0]=true;
expr.evaluate(batch);
Assert.assertEquals(0,batch.size);
// Right repeating, noNulls.
batch=makeStringBatchForColColCompare();
batch.cols[1].isRepeating=true;
batch.cols[1].noNulls=true;
expr.evaluate(batch);
Assert.assertEquals(2,batch.size);
Assert.assertEquals(0,batch.selected[0]);
Assert.assertEquals(1,batch.selected[1]);
// Both repeating, right noNulls.
batch=makeStringBatchForColColCompare();
batch.cols[0].isRepeating=true;
batch.cols[1].isRepeating=true;
batch.cols[1].noNulls=true;
expr.evaluate(batch);
Assert.assertEquals(4,batch.size);
// Left repeating noNulls, right has a null at row 0.
batch=makeStringBatchForColColCompare();
batch.cols[0].isRepeating=true;
batch.cols[0].noNulls=true;
batch.cols[1].isNull[0]=true;
expr.evaluate(batch);
Assert.assertEquals(2,batch.size);
Assert.assertEquals(3,batch.selected[1]);
// Right repeating, left noNulls.
batch=makeStringBatchForColColCompare();
batch.cols[1].isRepeating=true;
batch.cols[0].noNulls=true;
expr.evaluate(batch);
Assert.assertEquals(3,batch.size);
Assert.assertEquals(3,batch.selected[2]);
// Right repeating AND null: nothing can pass.
batch=makeStringBatchForColColCompare();
batch.cols[1].isRepeating=true;
batch.cols[0].noNulls=true;
batch.cols[1].isNull[0]=true;
expr.evaluate(batch);
Assert.assertEquals(0,batch.size);
// Both repeating, left noNulls.
batch=makeStringBatchForColColCompare();
batch.cols[0].isRepeating=true;
batch.cols[1].isRepeating=true;
batch.cols[0].noNulls=true;
expr.evaluate(batch);
Assert.assertEquals(4,batch.size);
// Both repeating, left noNulls, right repeating-null: nothing passes.
batch=makeStringBatchForColColCompare();
batch.cols[0].isRepeating=true;
batch.cols[1].isRepeating=true;
batch.cols[0].noNulls=true;
batch.cols[1].isNull[0]=true;
expr.evaluate(batch);
Assert.assertEquals(0,batch.size);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
// concat(scalar "red", column): verifies both the concatenated bytes and the
// propagation of noNulls/isRepeating through four input permutations.
@Test public void testStringScalarConcatCol(){
// 1) Default batch: row 2 is null, so the output keeps that null.
VectorizedRowBatch batch=makeStringBatch();
StringScalarConcatStringGroupCol expr=new StringScalarConcatStringGroupCol(red,0,1);
expr.evaluate(batch);
BytesColumnVector outCol=(BytesColumnVector)batch.cols[1];
int cmp=StringExpr.compare(redred,0,redred.length,outCol.vector[0],outCol.start[0],outCol.length[0]);
Assert.assertEquals(0,cmp);
Assert.assertTrue(outCol.isNull[2]);
int cmp2=StringExpr.compare(redgreen,0,redgreen.length,outCol.vector[1],outCol.start[1],outCol.length[1]);
Assert.assertEquals(0,cmp2);
Assert.assertFalse(outCol.noNulls);
Assert.assertFalse(outCol.isRepeating);
// 2) Input noNulls: row 2 becomes scalar + empty == "red", noNulls propagates.
batch=makeStringBatch();
batch.cols[0].noNulls=true;
expr.evaluate(batch);
outCol=(BytesColumnVector)batch.cols[1];
cmp=StringExpr.compare(redred,0,redred.length,outCol.vector[0],outCol.start[0],outCol.length[0]);
Assert.assertEquals(0,cmp);
cmp2=StringExpr.compare(redgreen,0,redgreen.length,outCol.vector[1],outCol.start[1],outCol.length[1]);
Assert.assertEquals(0,cmp2);
int cmp3=StringExpr.compare(red,0,red.length,outCol.vector[2],outCol.start[2],outCol.length[2]);
Assert.assertEquals(0,cmp3);
Assert.assertTrue(outCol.noNulls);
Assert.assertFalse(outCol.isRepeating);
// 3) Repeating input (nulls still possible): computed once, stays repeating.
batch=makeStringBatch();
batch.cols[0].isRepeating=true;
expr.evaluate(batch);
outCol=(BytesColumnVector)batch.cols[1];
cmp=StringExpr.compare(redred,0,redred.length,outCol.vector[0],outCol.start[0],outCol.length[0]);
Assert.assertEquals(0,cmp);
Assert.assertTrue(outCol.isRepeating);
Assert.assertFalse(outCol.noNulls);
// 4) Repeating and noNulls: both flags propagate to the output.
batch=makeStringBatch();
batch.cols[0].isRepeating=true;
batch.cols[0].noNulls=true;
expr.evaluate(batch);
outCol=(BytesColumnVector)batch.cols[1];
cmp=StringExpr.compare(redred,0,redred.length,outCol.vector[0],outCol.start[0],outCol.length[0]);
Assert.assertEquals(0,cmp);
Assert.assertTrue(outCol.isRepeating);
Assert.assertTrue(outCol.noNulls);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
// Same scenarios as testStringScalarConcatCol, but the scalar operand is a
// HiveVarchar instead of a raw string; results must be identical.
@Test public void testVarCharScalarConcatCol(){
// 1) Default batch: row 2 is null, so the output keeps that null.
VectorizedRowBatch batch=makeStringBatch();
VarCharScalarConcatStringGroupCol expr=new VarCharScalarConcatStringGroupCol(new HiveVarchar(new String(red),14),0,1);
expr.evaluate(batch);
BytesColumnVector outCol=(BytesColumnVector)batch.cols[1];
int cmp=StringExpr.compare(redred,0,redred.length,outCol.vector[0],outCol.start[0],outCol.length[0]);
Assert.assertEquals(0,cmp);
Assert.assertTrue(outCol.isNull[2]);
int cmp2=StringExpr.compare(redgreen,0,redgreen.length,outCol.vector[1],outCol.start[1],outCol.length[1]);
Assert.assertEquals(0,cmp2);
Assert.assertFalse(outCol.noNulls);
Assert.assertFalse(outCol.isRepeating);
// 2) Input noNulls: row 2 becomes scalar + empty == "red", noNulls propagates.
batch=makeStringBatch();
batch.cols[0].noNulls=true;
expr.evaluate(batch);
outCol=(BytesColumnVector)batch.cols[1];
cmp=StringExpr.compare(redred,0,redred.length,outCol.vector[0],outCol.start[0],outCol.length[0]);
Assert.assertEquals(0,cmp);
cmp2=StringExpr.compare(redgreen,0,redgreen.length,outCol.vector[1],outCol.start[1],outCol.length[1]);
Assert.assertEquals(0,cmp2);
int cmp3=StringExpr.compare(red,0,red.length,outCol.vector[2],outCol.start[2],outCol.length[2]);
Assert.assertEquals(0,cmp3);
Assert.assertTrue(outCol.noNulls);
Assert.assertFalse(outCol.isRepeating);
// 3) Repeating input (nulls still possible): computed once, stays repeating.
batch=makeStringBatch();
batch.cols[0].isRepeating=true;
expr.evaluate(batch);
outCol=(BytesColumnVector)batch.cols[1];
cmp=StringExpr.compare(redred,0,redred.length,outCol.vector[0],outCol.start[0],outCol.length[0]);
Assert.assertEquals(0,cmp);
Assert.assertTrue(outCol.isRepeating);
Assert.assertFalse(outCol.noNulls);
// 4) Repeating and noNulls: both flags propagate to the output.
batch=makeStringBatch();
batch.cols[0].isRepeating=true;
batch.cols[0].noNulls=true;
expr.evaluate(batch);
outCol=(BytesColumnVector)batch.cols[1];
cmp=StringExpr.compare(redred,0,redred.length,outCol.vector[0],outCol.start[0],outCol.length[0]);
Assert.assertEquals(0,cmp);
Assert.assertTrue(outCol.isRepeating);
Assert.assertTrue(outCol.noNulls);
}
EqualityVerifier PublicFieldVerifier
// IN-list membership (StringColumnInList) over a string column, for the
// no-null, has-null, and repeating input cases; output is a 0/1 long column.
@Test public void testStringInExpr(){
VectorizedRowBatch b=makeStringBatch();
b.size=2;
b.cols[0].noNulls=true;
byte[][] inVals=new byte[2][];
inVals[0]=red;
inVals[1]=blue;
StringColumnInList expr=new StringColumnInList(0,2);
expr.setInListValues(inVals);
expr.evaluate(b);
LongColumnVector outV=(LongColumnVector)b.cols[2];
Assert.assertEquals(1,outV.vector[0]);
Assert.assertEquals(0,outV.vector[1]);
// A null input row must propagate to the output.
b=makeStringBatch();
b.size=2;
b.cols[0].noNulls=false;
b.cols[0].isNull[0]=true;
expr.evaluate(b);
outV=(LongColumnVector)b.cols[2];
// Assert each flag separately (the original combined them into one boolean,
// which made a failure impossible to localize).
Assert.assertFalse(outV.noNulls);
Assert.assertTrue(outV.isNull[0]);
Assert.assertFalse(outV.isNull[1]);
Assert.assertEquals(0,outV.vector[1]);
// Repeating input produces a repeating output computed from row 0.
b=makeStringBatch();
b.size=2;
b.cols[0].noNulls=true;
b.cols[0].isRepeating=true;
expr.evaluate(b);
outV=(LongColumnVector)b.cols[2];
Assert.assertEquals(1,outV.vector[0]);
Assert.assertTrue(outV.isRepeating);
}
BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
// Verifies StringLength over the mixed-char-size fixture across the standard
// null / noNulls / repeating input permutations.
@Test public void testStringLength(){
  VectorizedRowBatch b = makeStringBatchMixedCharSize();
  StringLength lengthExpr = new StringLength(0, 1);
  lengthExpr.evaluate(b);
  LongColumnVector lengths = (LongColumnVector) b.cols[1];
  Assert.assertEquals(5, lengths.vector[1]);
  Assert.assertTrue(lengths.isNull[2]);   // the fixture's null row stays null
  Assert.assertEquals(4, lengths.vector[3]);

  // No nulls in the input: output must advertise noNulls.
  b = makeStringBatchMixedCharSize();
  b.cols[0].noNulls = true;
  lengthExpr.evaluate(b);
  lengths = (LongColumnVector) b.cols[1];
  Assert.assertTrue(lengths.noNulls);
  Assert.assertEquals(4, lengths.vector[3]);

  // Repeating input, nulls still possible.
  b = makeStringBatchMixedCharSize();
  b.cols[0].isRepeating = true;
  lengthExpr.evaluate(b);
  lengths = (LongColumnVector) b.cols[1];
  Assert.assertTrue(lengths.isRepeating);
  Assert.assertFalse(lengths.noNulls);
  Assert.assertEquals(7, lengths.vector[0]);

  // Repeating input with no nulls: both flags propagate.
  b = makeStringBatchMixedCharSize();
  b.cols[0].isRepeating = true;
  b.cols[0].noNulls = true;
  lengthExpr.evaluate(b);
  lengths = (LongColumnVector) b.cols[1];
  Assert.assertEquals(7, lengths.vector[0]);
  Assert.assertTrue(lengths.isRepeating);
  Assert.assertTrue(lengths.noNulls);
}
EqualityVerifier PublicFieldVerifier
// StringTrim must strip leading and trailing blanks and leave inner
// characters untouched.
@Test public void testVectorTrim(){
  VectorizedRowBatch batch = makeTrimBatch();
  VectorExpression trim = new StringTrim(0, 1);
  trim.evaluate(batch);
  BytesColumnVector trimmed = (BytesColumnVector) batch.cols[1];
  // empty input stays empty
  Assert.assertEquals(0, StringExpr.compare(emptyString, 0, 0, trimmed.vector[0], 0, 0));
  // leading blanks removed
  Assert.assertEquals(0, StringExpr.compare(blanksLeft, 2, 3, trimmed.vector[1], trimmed.start[1], trimmed.length[1]));
  // trailing blanks removed
  Assert.assertEquals(0, StringExpr.compare(blanksRight, 0, 3, trimmed.vector[2], trimmed.start[2], trimmed.length[2]));
  // blanks on both sides removed
  Assert.assertEquals(0, StringExpr.compare(blanksBoth, 2, 3, trimmed.vector[3], trimmed.start[3], trimmed.length[3]));
  // no blanks: value unchanged
  Assert.assertEquals(0, StringExpr.compare(red, 0, 3, trimmed.vector[4], trimmed.start[4], trimmed.length[4]));
  // all blanks: empty result
  Assert.assertEquals(0, StringExpr.compare(blankString, 0, 0, trimmed.vector[5], trimmed.start[5], trimmed.length[5]));
}
APIUtilityVerifier BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
// concat(col, col) via StringGroupConcatColCol: checks concatenated bytes
// plus null/isRepeating propagation for every combination of per-column
// noNulls / isRepeating / repeating-null inputs.
@Test public void testColConcatCol(){
// Defaults (nulls possible in both columns).
VectorizedRowBatch batch=makeStringBatch2In1Out();
StringGroupConcatColCol expr=new StringGroupConcatColCol(0,1,2);
expr.evaluate(batch);
BytesColumnVector outCol=(BytesColumnVector)batch.cols[2];
int cmp=StringExpr.compare(redred,0,redred.length,outCol.vector[0],outCol.start[0],outCol.length[0]);
Assert.assertEquals(0,cmp);
Assert.assertTrue(outCol.isNull[2]);
int cmp2=StringExpr.compare(greengreen,0,greengreen.length,outCol.vector[1],outCol.start[1],outCol.length[1]);
Assert.assertEquals(0,cmp2);
Assert.assertFalse(outCol.noNulls);
Assert.assertFalse(outCol.isRepeating);
// Both columns noNulls: row 2 is empty + empty.
batch=makeStringBatch2In1Out();
batch.cols[0].noNulls=true;
batch.cols[1].noNulls=true;
expr.evaluate(batch);
outCol=(BytesColumnVector)batch.cols[2];
cmp=StringExpr.compare(redred,0,redred.length,outCol.vector[0],outCol.start[0],outCol.length[0]);
Assert.assertEquals(0,cmp);
cmp2=StringExpr.compare(greengreen,0,greengreen.length,outCol.vector[1],outCol.start[1],outCol.length[1]);
Assert.assertEquals(0,cmp2);
int cmp3=StringExpr.compare(emptyString,0,emptyString.length,outCol.vector[2],outCol.start[2],outCol.length[2]);
Assert.assertEquals(0,cmp3);
Assert.assertTrue(outCol.noNulls);
Assert.assertFalse(outCol.isRepeating);
// Left column is repeating AND null: output is a repeating null.
batch=makeStringBatch2In1Out();
batch.cols[0].isRepeating=true;
batch.cols[0].isNull[0]=true;
expr.evaluate(batch);
outCol=(BytesColumnVector)batch.cols[2];
Assert.assertEquals(3,batch.size);
Assert.assertEquals(true,outCol.isRepeating);
Assert.assertEquals(true,outCol.isNull[0]);
// Left repeating non-null: output varies by row, nulls from the right column.
batch=makeStringBatch2In1Out();
batch.cols[0].isRepeating=true;
expr.evaluate(batch);
outCol=(BytesColumnVector)batch.cols[2];
Assert.assertEquals(false,outCol.isRepeating);
cmp=StringExpr.compare(redred,0,redred.length,outCol.vector[0],outCol.start[0],outCol.length[0]);
Assert.assertEquals(0,cmp);
Assert.assertEquals(true,outCol.isNull[2]);
// Right column repeating AND null: output is a repeating null.
batch=makeStringBatch2In1Out();
batch.cols[1].isRepeating=true;
batch.cols[1].isNull[0]=true;
expr.evaluate(batch);
outCol=(BytesColumnVector)batch.cols[2];
Assert.assertEquals(3,batch.size);
Assert.assertEquals(true,outCol.isRepeating);
Assert.assertEquals(true,outCol.isNull[0]);
// Both columns repeating null: still a repeating null.
batch=makeStringBatch2In1Out();
batch.cols[0].isRepeating=true;
batch.cols[0].isNull[0]=true;
batch.cols[1].isRepeating=true;
batch.cols[1].isNull[0]=true;
expr.evaluate(batch);
outCol=(BytesColumnVector)batch.cols[2];
Assert.assertEquals(3,batch.size);
Assert.assertEquals(true,outCol.isRepeating);
Assert.assertEquals(true,outCol.isNull[0]);
// Right repeating noNulls; nulls may still come from the left column.
batch=makeStringBatch2In1Out();
batch.cols[1].isRepeating=true;
batch.cols[1].noNulls=true;
expr.evaluate(batch);
outCol=(BytesColumnVector)batch.cols[2];
Assert.assertEquals(3,batch.size);
Assert.assertEquals(false,outCol.isRepeating);
Assert.assertEquals(false,outCol.isNull[0]);
Assert.assertEquals(false,outCol.noNulls);
Assert.assertEquals(true,outCol.isNull[2]);
cmp=StringExpr.compare(greenred,0,greenred.length,outCol.vector[1],outCol.start[1],outCol.length[1]);
Assert.assertEquals(0,cmp);
// Now also mark the left column noNulls: output becomes noNulls too and
// row 2 is empty + repeated "red".
batch.cols[0].noNulls=true;
expr.evaluate(batch);
Assert.assertEquals(false,outCol.isRepeating);
Assert.assertEquals(true,outCol.noNulls);
cmp=StringExpr.compare(red,0,red.length,outCol.vector[2],outCol.start[2],outCol.length[2]);
Assert.assertEquals(0,cmp);
// Left repeating noNulls; nulls may still come from the right column.
batch=makeStringBatch2In1Out();
batch.cols[0].isRepeating=true;
batch.cols[0].noNulls=true;
expr.evaluate(batch);
outCol=(BytesColumnVector)batch.cols[2];
Assert.assertEquals(3,batch.size);
Assert.assertEquals(false,outCol.isRepeating);
Assert.assertEquals(false,outCol.isNull[0]);
Assert.assertEquals(false,outCol.noNulls);
Assert.assertEquals(true,outCol.isNull[2]);
cmp=StringExpr.compare(redgreen,0,redgreen.length,outCol.vector[1],outCol.start[1],outCol.length[1]);
Assert.assertEquals(0,cmp);
// Both columns repeating noNulls: result is a single repeated concatenation.
batch=makeStringBatch2In1Out();
batch.cols[0].isRepeating=true;
batch.cols[0].noNulls=true;
batch.cols[1].isRepeating=true;
batch.cols[1].noNulls=true;
expr.evaluate(batch);
outCol=(BytesColumnVector)batch.cols[2];
Assert.assertEquals(3,batch.size);
Assert.assertEquals(true,outCol.isRepeating);
Assert.assertEquals(false,outCol.isNull[0]);
cmp=StringExpr.compare(redred,0,redred.length,outCol.vector[0],outCol.start[0],outCol.length[0]);
Assert.assertEquals(0,cmp);
}
EqualityVerifier PublicFieldVerifier
// Projection-mode scalar == column comparison: 0/1 results are written into
// output column 2.
@Test public void testStringScalarCompareStringColProjection(){
  VectorizedRowBatch b = makeStringBatch();

  // red2 scalar: only row 0 matches.
  VectorExpression equalsRed = new StringScalarEqualStringGroupColumn(red2, 0, 2);
  equalsRed.evaluate(b);
  Assert.assertEquals(3, b.size);
  LongColumnVector result = (LongColumnVector) b.cols[2];
  Assert.assertEquals(1, result.vector[0]);
  Assert.assertEquals(0, result.vector[1]);
  Assert.assertEquals(0, result.vector[2]);

  // green scalar: only row 1 matches.
  b = makeStringBatch();
  VectorExpression equalsGreen = new StringScalarEqualStringGroupColumn(green, 0, 2);
  equalsGreen.evaluate(b);
  Assert.assertEquals(3, b.size);
  result = (LongColumnVector) b.cols[2];
  Assert.assertEquals(0, result.vector[0]);
  Assert.assertEquals(1, result.vector[1]);
  Assert.assertEquals(0, result.vector[2]);
}
Class: org.apache.hadoop.hive.ql.exec.vector.expressions.TestVectorTimestampExpressions EqualityVerifier PublicFieldVerifier
// year() over a string column: after the shared STRING_LONG sweep, feed a
// string that does not parse as a date ("22013" per the bytes below) and
// verify the output row is null while the input row is not.
@Test public void testVectorUDFYearString(){
  testVectorUDFYear(TestType.STRING_LONG);
  VectorizedRowBatch b = getVectorizedRowBatchStringLong(new byte[]{'2','2','0','1','3'}, 1, 3);
  VectorExpression yearUdf = new VectorUDFYearString(0, 1);
  yearUdf.evaluate(b);
  LongColumnVector years = (LongColumnVector) b.cols[1];
  Assert.assertFalse(b.cols[0].isNull[0]);  // input itself is not null...
  Assert.assertTrue(years.isNull[0]);       // ...but the unparseable result is
}
Class: org.apache.hadoop.hive.ql.exec.vector.expressions.TestVectorTypeCasts EqualityVerifier PublicFieldVerifier
// CastDoubleToBooleanViaDoubleToLong: row 3 of the shared fixture converts to
// 0 (false) and row 4 to 1 (true).
@Test public void testCastDoubleToBoolean(){
  VectorizedRowBatch batch = TestVectorMathFunctions.getVectorizedRowBatchDoubleInLongOut();
  LongColumnVector asBool = (LongColumnVector) batch.cols[1];
  batch.cols[0].noNulls = true;
  VectorExpression cast = new CastDoubleToBooleanViaDoubleToLong(0, 1);
  cast.evaluate(batch);
  Assert.assertEquals(0, asBool.vector[3]);
  Assert.assertEquals(1, asBool.vector[4]);
}
BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
// CastDecimalToDouble: values convert within epsilon; null flags and the
// isRepeating flag must carry over from input to output.
@Test public void testCastDecimalToDouble(){
final double eps=0.000001d;
VectorizedRowBatch b=getBatchDecimalDouble();
VectorExpression expr=new CastDecimalToDouble(0,1);
expr.evaluate(b);
DoubleColumnVector r=(DoubleColumnVector)b.cols[1];
assertEquals(1.1d,r.vector[0],eps);
assertEquals(-2.2d,r.vector[1],eps);
assertEquals(9999999999999999.0d,r.vector[2],eps);
// Null in input row 1 propagates; row 0 remains valid.
b=getBatchDecimalDouble();
b.cols[0].noNulls=false;
b.cols[0].isNull[1]=true;
expr.evaluate(b);
r=(DoubleColumnVector)b.cols[1];
assertFalse(r.noNulls);
assertTrue(r.isNull[1]);
assertFalse(r.isNull[0]);
assertEquals(1.1d,r.vector[0],eps);
// Repeating input -> repeating output.
b=getBatchDecimalDouble();
b.cols[0].isRepeating=true;
expr.evaluate(b);
r=(DoubleColumnVector)b.cols[1];
assertTrue(r.isRepeating);
assertEquals(1.1d,r.vector[0],eps);
// Repeating null input -> repeating null output.
b=getBatchDecimalDouble();
b.cols[0].isRepeating=true;
b.cols[0].noNulls=false;
b.cols[0].isNull[0]=true;
expr.evaluate(b);
r=(DoubleColumnVector)b.cols[1];
assertTrue(r.isRepeating);
assertTrue(r.isNull[0]);
}
EqualityVerifier PublicFieldVerifier
// CastTimestampToDoubleViaLongToDouble: nanosecond timestamps convert to
// fractional seconds (+/-1 ns -> +/-1e-9 s).
// (The original also pulled b.cols[0] into an unused local; removed.)
@Test public void testCastTimestampToDouble(){
VectorizedRowBatch b=TestVectorMathFunctions.getVectorizedRowBatchLongInDoubleOut();
DoubleColumnVector resultV=(DoubleColumnVector)b.cols[1];
b.cols[0].noNulls=true;
VectorExpression expr=new CastTimestampToDoubleViaLongToDouble(0,1);
expr.evaluate(b);
Assert.assertEquals(-1E-9D,resultV.vector[1]);
Assert.assertEquals(1E-9D,resultV.vector[3]);
}
EqualityVerifier PublicFieldVerifier
// CastDoubleToLong: spot-check one converted element of the shared fixture.
@Test public void testVectorCastDoubleToLong(){
  VectorizedRowBatch batch = TestVectorMathFunctions.getVectorizedRowBatchDoubleInLongOut();
  LongColumnVector converted = (LongColumnVector) batch.cols[1];
  batch.cols[0].noNulls = true;
  VectorExpression cast = new CastDoubleToLong(0, 1);
  cast.evaluate(batch);
  Assert.assertEquals(1, converted.vector[6]);
}
EqualityVerifier PublicFieldVerifier
// CastLongToTimestampViaLongToLong: second values scale up to nanoseconds.
@Test public void testCastLongToTimestamp(){
  VectorizedRowBatch batch = TestVectorMathFunctions.getVectorizedRowBatchLongInLongOut();
  LongColumnVector asNanos = (LongColumnVector) batch.cols[1];
  batch.cols[0].noNulls = true;
  VectorExpression cast = new CastLongToTimestampViaLongToLong(0, 1);
  cast.evaluate(batch);
  Assert.assertEquals(-2 * NANOS_PER_SECOND, asNanos.vector[0]);
  Assert.assertEquals(2 * NANOS_PER_SECOND, asNanos.vector[1]);
}
EqualityVerifier PublicFieldVerifier
// CastDecimalToTimestamp: decimal second values become nanosecond longs
// (expected values taken from the getBatchDecimalLong2 fixture).
@Test public void testCastDecimalToTimestamp(){
  VectorizedRowBatch batch = getBatchDecimalLong2();
  VectorExpression cast = new CastDecimalToTimestamp(0, 1);
  cast.evaluate(batch);
  LongColumnVector nanos = (LongColumnVector) batch.cols[1];
  assertEquals(1111111111L, nanos.vector[0]);
  assertEquals(-2222222222L, nanos.vector[1]);
  assertEquals(31536000999999999L, nanos.vector[2]);
}
EqualityVerifier PublicFieldVerifier
// CastBooleanToStringViaLongToString: row 1 is forced to 1 and must render
// "TRUE"; row 0 (fixture value) renders "FALSE".
@Test public void testCastBooleanToString(){
  byte[] trueBytes = toBytes("TRUE");
  byte[] falseBytes = toBytes("FALSE");
  VectorizedRowBatch batch = TestVectorMathFunctions.getBatchForStringMath();
  LongColumnVector boolInput = (LongColumnVector) batch.cols[1];
  BytesColumnVector rendered = (BytesColumnVector) batch.cols[2];
  boolInput.vector[1] = 1;
  VectorExpression cast = new CastBooleanToStringViaLongToString(1, 2);
  cast.evaluate(batch);
  Assert.assertEquals(0, StringExpr.compare(falseBytes, 0, falseBytes.length, rendered.vector[0], rendered.start[0], rendered.length[0]));
  Assert.assertEquals(0, StringExpr.compare(trueBytes, 0, trueBytes.length, rendered.vector[1], rendered.start[1], rendered.length[1]));
}
BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
// CastDecimalToLong: decimals truncate to longs; null flags and the
// isRepeating flag must carry over from input to output.
@Test public void testCastDecimalToLong(){
VectorizedRowBatch b=getBatchDecimalLong();
VectorExpression expr=new CastDecimalToLong(0,1);
expr.evaluate(b);
LongColumnVector r=(LongColumnVector)b.cols[1];
assertEquals(1,r.vector[0]);
assertEquals(-2,r.vector[1]);
assertEquals(9999999999999999L,r.vector[2]);
// Null in input row 1 propagates; row 0 remains valid.
b=getBatchDecimalLong();
b.cols[0].noNulls=false;
b.cols[0].isNull[1]=true;
expr.evaluate(b);
r=(LongColumnVector)b.cols[1];
assertFalse(r.noNulls);
assertTrue(r.isNull[1]);
assertFalse(r.isNull[0]);
assertEquals(1,r.vector[0]);
// Repeating input -> repeating output.
b=getBatchDecimalLong();
b.cols[0].isRepeating=true;
expr.evaluate(b);
r=(LongColumnVector)b.cols[1];
assertTrue(r.isRepeating);
assertEquals(1,r.vector[0]);
// Repeating null input -> repeating null output.
b=getBatchDecimalLong();
b.cols[0].isRepeating=true;
b.cols[0].noNulls=false;
b.cols[0].isNull[0]=true;
expr.evaluate(b);
r=(LongColumnVector)b.cols[1];
assertTrue(r.isRepeating);
assertTrue(r.isNull[0]);
}
EqualityVerifier PublicFieldVerifier
// CastLongToDouble: spot-check one converted element of the shared fixture.
@Test public void testVectorCastLongToDouble(){
  VectorizedRowBatch batch = TestVectorMathFunctions.getVectorizedRowBatchLongInDoubleOut();
  DoubleColumnVector asDouble = (DoubleColumnVector) batch.cols[1];
  batch.cols[0].noNulls = true;
  VectorExpression cast = new CastLongToDouble(0, 1);
  cast.evaluate(batch);
  Assert.assertEquals(2.0, asDouble.vector[4]);
}
BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
// CastDecimalToString: decimals render with their exact scale, and the output
// length metadata must match the rendered text exactly.
@Test public void testCastDecimalToString(){
VectorizedRowBatch b=getBatchDecimalString();
VectorExpression expr=new CastDecimalToString(0,1);
expr.evaluate(b);
BytesColumnVector r=(BytesColumnVector)b.cols[1];
byte[] v=toBytes("1.1");
// Failure message reports "expectedLen actualLen". (Replaces the original's
// awkward ((Integer)v.length).toString() boxing cast; the message text is
// unchanged.)
assertTrue(v.length + " " + r.length[0],v.length == r.length[0]);
Assert.assertEquals(0,StringExpr.compare(v,0,v.length,r.vector[0],r.start[0],r.length[0]));
v=toBytes("-2.2");
Assert.assertEquals(0,StringExpr.compare(v,0,v.length,r.vector[1],r.start[1],r.length[1]));
v=toBytes("9999999999999999");
Assert.assertEquals(0,StringExpr.compare(v,0,v.length,r.vector[2],r.start[2],r.length[2]));
}
EqualityVerifier PublicFieldVerifier
// CastDecimalToBoolean: a zero decimal maps to 0 (false), any non-zero
// decimal maps to 1 (true).
@Test public void testCastDecimalToBoolean(){
  VectorizedRowBatch batch = getBatchDecimalLong();
  VectorExpression cast = new CastDecimalToBoolean(0, 1);
  DecimalColumnVector input = (DecimalColumnVector) batch.cols[0];
  input.vector[1].set(HiveDecimal.create(0));  // force row 1 to exactly zero
  cast.evaluate(batch);
  LongColumnVector asBool = (LongColumnVector) batch.cols[1];
  assertEquals(1, asBool.vector[0]);
  assertEquals(0, asBool.vector[1]);
  assertEquals(1, asBool.vector[2]);
}
EqualityVerifier PublicFieldVerifier
// CastLongToString: a long renders as its decimal string representation.
@Test public void testCastLongToString(){
  VectorizedRowBatch batch = TestVectorMathFunctions.getBatchForStringMath();
  BytesColumnVector rendered = (BytesColumnVector) batch.cols[2];
  batch.cols[1].noNulls = true;
  VectorExpression cast = new CastLongToString(1, 2);
  cast.evaluate(batch);
  byte[] expected = toBytes("255");
  Assert.assertEquals(0, StringExpr.compare(expected, 0, expected.length, rendered.vector[1], rendered.start[1], rendered.length[1]));
}
EqualityVerifier PublicFieldVerifier
// CastLongToBooleanViaLongToLong: 0 maps to 0 (false), non-zero to 1 (true).
@Test public void testCastLongToBoolean(){
  VectorizedRowBatch batch = TestVectorMathFunctions.getVectorizedRowBatchLongInLongOut();
  LongColumnVector input = (LongColumnVector) batch.cols[0];
  input.vector[0] = 0;  // make row 0 explicitly zero
  LongColumnVector asBool = (LongColumnVector) batch.cols[1];
  batch.cols[0].noNulls = true;
  VectorExpression cast = new CastLongToBooleanViaLongToLong(0, 1);
  cast.evaluate(batch);
  Assert.assertEquals(0, asBool.vector[0]);
  Assert.assertEquals(1, asBool.vector[1]);
}
EqualityVerifier PublicFieldVerifier
// CastDoubleToTimestampViaDoubleToLong: fractional seconds scale to
// nanoseconds (0.5 s -> 0.5 * NANOS_PER_SECOND).
@Test public void testCastDoubleToTimestamp(){
  VectorizedRowBatch batch = TestVectorMathFunctions.getVectorizedRowBatchDoubleInLongOut();
  LongColumnVector nanos = (LongColumnVector) batch.cols[1];
  batch.cols[0].noNulls = true;
  VectorExpression cast = new CastDoubleToTimestampViaDoubleToLong(0, 1);
  cast.evaluate(batch);
  Assert.assertEquals(0, nanos.vector[3]);
  Assert.assertEquals((long) (0.5d * NANOS_PER_SECOND), nanos.vector[4]);
}
EqualityVerifier PublicFieldVerifier
// CastTimestampToLongViaLongToLong: one full second of nanoseconds converts
// to the long value 1.
@Test public void testCastTimestampToLong(){
  VectorizedRowBatch batch = TestVectorMathFunctions.getVectorizedRowBatchLongInLongOut();
  LongColumnVector input = (LongColumnVector) batch.cols[0];
  input.vector[0] = NANOS_PER_SECOND;
  LongColumnVector seconds = (LongColumnVector) batch.cols[1];
  batch.cols[0].noNulls = true;
  VectorExpression cast = new CastTimestampToLongViaLongToLong(0, 1);
  cast.evaluate(batch);
  Assert.assertEquals(1, seconds.vector[0]);
}
Class: org.apache.hadoop.hive.ql.exec.vector.udf.TestVectorUDFAdaptor APIUtilityVerifier BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
// Drives a GenericUDF (per the asserts: null input rows produce the constant
// "UNKNOWN", non-null rows pass through) through the VectorUDFAdaptor bridge.
@Test public void testGenericUDF(){
ExprNodeGenericFuncDesc funcDesc;
GenericUDF genericUDF=new GenericUDFIsNull();
TypeInfo typeInfoStr=TypeInfoFactory.stringTypeInfo;
List children=new ArrayList();
children.add(new ExprNodeColumnDesc(typeInfoStr,"col0","tablename",false));
children.add(new ExprNodeConstantDesc(typeInfoStr,"UNKNOWN"));
VectorUDFArgDesc[] argDescs=new VectorUDFArgDesc[2];
for (int i=0; i < 2; i++) {
argDescs[i]=new VectorUDFArgDesc();
}
argDescs[0].setVariable(0);
argDescs[1].setConstant((ExprNodeConstantDesc)children.get(1));
funcDesc=new ExprNodeGenericFuncDesc(typeInfoStr,genericUDF,"myisnull",children);
VectorUDFAdaptor vudf=null;
try {
vudf=new VectorUDFAdaptor(funcDesc,3,"String",argDescs);
}
catch ( HiveException e) {
// Rethrow so the real cause is reported; the original only did
// assertTrue(false), which left vudf null and masked the error as an NPE.
throw new RuntimeException(e);
}
VectorizedRowBatch b;
byte[] red=null;
byte[] unknown=null;
try {
red="red".getBytes("UTF-8");
unknown="UNKNOWN".getBytes("UTF-8");
}
catch ( Exception e) {
// UTF-8 is guaranteed by the JVM spec; never swallow the (impossible)
// failure silently, since that would leave the expected values null.
throw new RuntimeException(e);
}
BytesColumnVector out;
b=getBatchStrDblLongWithStrOut();
b.cols[0].noNulls=false;
b.cols[0].isNull[0]=true;
vudf.evaluate(b);
out=(BytesColumnVector)b.cols[3];
int cmp=StringExpr.compare(red,0,red.length,out.vector[1],out.start[1],out.length[1]);
assertEquals(0,cmp);
cmp=StringExpr.compare(unknown,0,unknown.length,out.vector[0],out.start[0],out.length[0]);
assertEquals(0,cmp);
// The UDF's result is never null: either the whole vector advertises
// noNulls or at least row 0 is not flagged null.
assertTrue(out.noNulls || !out.isNull[0]);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
// Bridged UDF with three arguments (string, long, double): checks the
// concatenated result values, null propagation from an argument column, and
// all-repeating inputs.
@Test public void testMultiArgumentUDF(){
ExprNodeGenericFuncDesc funcDesc;
TypeInfo typeInfoStr=TypeInfoFactory.stringTypeInfo;
TypeInfo typeInfoLong=TypeInfoFactory.longTypeInfo;
TypeInfo typeInfoDbl=TypeInfoFactory.doubleTypeInfo;
GenericUDFBridge genericUDFBridge=new GenericUDFBridge("testudf",false,ConcatTextLongDoubleUDF.class.getName());
List children=new ArrayList();
children.add(new ExprNodeColumnDesc(typeInfoStr,"col0","tablename",false));
children.add(new ExprNodeColumnDesc(typeInfoLong,"col1","tablename",false));
children.add(new ExprNodeColumnDesc(typeInfoDbl,"col2","tablename",false));
VectorUDFArgDesc[] argDescs=new VectorUDFArgDesc[3];
for (int i=0; i < 3; i++) {
argDescs[i]=new VectorUDFArgDesc();
argDescs[i].setVariable(i);
}
funcDesc=new ExprNodeGenericFuncDesc(typeInfoStr,genericUDFBridge,genericUDFBridge.getUdfName(),children);
VectorUDFAdaptor vudf=null;
try {
vudf=new VectorUDFAdaptor(funcDesc,3,"String",argDescs);
}
catch ( HiveException e) {
// Rethrow with the cause; the original's preceding assertTrue(false)
// replaced it with a cause-free AssertionError.
throw new RuntimeException(e);
}
VectorizedRowBatch b=getBatchStrDblLongWithStrOut();
vudf.evaluate(b);
byte[] result=null;
byte[] result2=null;
try {
result="red:1:1.0".getBytes("UTF-8");
result2="blue:0:0.0".getBytes("UTF-8");
}
catch ( Exception e) {
// UTF-8 is guaranteed by the JVM spec; don't swallow the (impossible)
// failure and then NPE on the null expected values below.
throw new RuntimeException(e);
}
BytesColumnVector out=(BytesColumnVector)b.cols[3];
int cmp=StringExpr.compare(result,0,result.length,out.vector[1],out.start[1],out.length[1]);
assertEquals(0,cmp);
assertTrue(out.noNulls);
// Marking col1 nullable: the fixture's null row must null the output row.
b=getBatchStrDblLongWithStrOut();
b.cols[1].noNulls=false;
vudf.evaluate(b);
out=(BytesColumnVector)b.cols[3];
assertFalse(out.noNulls);
assertTrue(out.isNull[1]);
// All inputs repeating: output is repeating, computed from row 0.
b=getBatchStrDblLongWithStrOut();
b.cols[0].isRepeating=true;
b.cols[1].isRepeating=true;
b.cols[2].isRepeating=true;
vudf.evaluate(b);
out=(BytesColumnVector)b.cols[3];
assertTrue(out.isRepeating);
cmp=StringExpr.compare(result2,0,result2.length,out.vector[0],out.start[0],out.length[0]);
assertEquals(0,cmp);
assertTrue(out.noNulls);
}
BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
/**
 * Exercises {@link VectorUDFAdaptor} with a single-argument long UDF,
 * covering the plain case, null propagation, and a repeating input column.
 */
@Test public void testLongUDF(){
  TypeInfo typeInfo=TypeInfoFactory.longTypeInfo;
  GenericUDFBridge genericUDFBridge=new GenericUDFBridge("longudf",false,LongUDF.class.getName());
  List children=new ArrayList();
  ExprNodeColumnDesc colDesc=new ExprNodeColumnDesc(typeInfo,"col0","tablename",false);
  children.add(colDesc);
  // The single UDF argument reads batch column 0.
  VectorUDFArgDesc[] argDescs=new VectorUDFArgDesc[1];
  argDescs[0]=new VectorUDFArgDesc();
  argDescs[0].setVariable(0);
  ExprNodeGenericFuncDesc funcDesc=new ExprNodeGenericFuncDesc(typeInfo,genericUDFBridge,genericUDFBridge.getUdfName(),children);
  VectorUDFAdaptor vudf;
  try {
    vudf=new VectorUDFAdaptor(funcDesc,1,"Long",argDescs);
  }
  catch (HiveException e) {
    // Previously this only did assertTrue(false), losing the cause and then
    // falling through to an NPE on vudf; rethrow with the real cause.
    throw new RuntimeException(e);
  }
  // Case 1: no nulls, not repeating. LongUDF is expected to add 1000.
  VectorizedRowBatch b=getBatchLongInLongOut();
  vudf.evaluate(b);
  LongColumnVector out=(LongColumnVector)b.cols[1];
  assertEquals(1000,out.vector[0]);
  assertEquals(1001,out.vector[1]);
  assertEquals(1002,out.vector[2]);
  assertTrue(out.noNulls);
  assertFalse(out.isRepeating);
  // Case 2: input has a null entry; it must propagate to the output.
  b=getBatchLongInLongOut();
  out=(LongColumnVector)b.cols[1];
  b.cols[0].noNulls=false;
  vudf.evaluate(b);
  assertFalse(out.noNulls);
  assertEquals(1000,out.vector[0]);
  assertEquals(1001,out.vector[1]);
  assertTrue(out.isNull[2]);
  assertFalse(out.isRepeating);
  // Case 3: repeating input -- either the output stays repeating with the
  // repeated value, or it is expanded; both are acceptable.
  b=getBatchLongInLongOut();
  out=(LongColumnVector)b.cols[1];
  b.cols[0].isRepeating=true;
  vudf.evaluate(b);
  assertTrue(b.cols[1].isRepeating && out.vector[0] == 1000 || !b.cols[1].isRepeating && out.vector[2] == 1000);
  assertEquals(3,b.size);
}
Class: org.apache.hadoop.hive.ql.hooks.TestHooks APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Verifies that a configured query redactor hook rewrites the query
 * string ('XXX' -> 'AAA') before it is stored in the configuration.
 */
@Test public void testQueryRedactor() throws Exception {
  HiveConf hiveConf=new HiveConf(TestHooks.class);
  HiveConf.setVar(hiveConf,HiveConf.ConfVars.QUERYREDACTORHOOKS,SimpleQueryRedactor.class.getName());
  Driver d=createDriver(hiveConf);
  int rc=d.compile("select 'XXX' from t1");
  assertEquals("Checking command success",0,rc);
  // The redacted text is what must land in HIVEQUERYSTRING.
  assertEquals("select 'AAA' from t1",HiveConf.getVar(hiveConf,HiveConf.ConfVars.HIVEQUERYSTRING));
}
APIUtilityVerifier EqualityVerifier
/**
 * Verifies HookUtils.redactLogString: null-safe for null conf and/or null
 * query, and applies SimpleQueryRedactor ('XXX' -> 'AAA') when configured.
 */
@Test public void testRedactLogString() throws Exception {
  HiveConf conf=new HiveConf(TestHooks.class);
  HiveConf.setVar(conf,HiveConf.ConfVars.QUERYREDACTORHOOKS,SimpleQueryRedactor.class.getName());
  // JUnit convention is expected-first; the original had the arguments
  // reversed, which yields misleading failure messages.
  assertEquals(null,HookUtils.redactLogString(null,null));
  assertEquals(null,HookUtils.redactLogString(conf,null));
  assertEquals("select 'AAA' from t1",HookUtils.redactLogString(conf,"select 'XXX' from t1"));
}
Class: org.apache.hadoop.hive.ql.io.TestAcidUtils APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * With txn 4 still open per the compactor's valid-txn list ("100:4"),
 * only the delta entirely below the open txn is returned for compaction.
 */
@Test public void deltasWithOpenTxnsNotInCompact() throws Exception {
  Configuration conf=new Configuration();
  MockFileSystem mockFs=new MockFileSystem(conf,
      new MockFile("mock:/tbl/part1/delta_1_1/bucket_0",500,new byte[0]),
      new MockFile("mock:/tbl/part1/delta_2_5/bucket_0",500,new byte[0]));
  Path partitionPath=new MockPath(mockFs,"mock:/tbl/part1");
  AcidUtils.Directory state=AcidUtils.getAcidState(partitionPath,conf,new ValidCompactorTxnList("100:4"));
  List current=state.getCurrentDirectories();
  assertEquals(1,current.size());
  assertEquals("mock:/tbl/part1/delta_1_1",current.get(0).getPath().toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * A partition directory with only pre-ACID ("original") files: no base, no
 * deltas, nothing obsolete, and all bucket-like files (including files in
 * subdirectories) reported as originals, excluding the _done marker.
 */
@Test public void testOriginal() throws Exception {
Configuration conf=new Configuration();
// Plain bucket files, a non-bucket file, a _done marker, and a nested file.
MockFileSystem fs=new MockFileSystem(conf,new MockFile("mock:/tbl/part1/000000_0",500,new byte[0]),new MockFile("mock:/tbl/part1/000001_1",500,new byte[0]),new MockFile("mock:/tbl/part1/000002_0",500,new byte[0]),new MockFile("mock:/tbl/part1/random",500,new byte[0]),new MockFile("mock:/tbl/part1/_done",0,new byte[0]),new MockFile("mock:/tbl/part1/subdir/000000_0",0,new byte[0]));
AcidUtils.Directory dir=AcidUtils.getAcidState(new MockPath(fs,"/tbl/part1"),conf,new ValidReadTxnList("100:"));
// No ACID structure at all in this directory.
assertEquals(null,dir.getBaseDirectory());
assertEquals(0,dir.getCurrentDirectories().size());
assertEquals(0,dir.getObsolete().size());
// Everything except _done counts as an original file, in listing order.
List result=dir.getOriginalFiles();
assertEquals(5,result.size());
assertEquals("mock:/tbl/part1/000000_0",result.get(0).getFileStatus().getPath().toString());
assertEquals("mock:/tbl/part1/000001_1",result.get(1).getFileStatus().getPath().toString());
assertEquals("mock:/tbl/part1/000002_0",result.get(2).getFileStatus().getPath().toString());
assertEquals("mock:/tbl/part1/random",result.get(3).getFileStatus().getPath().toString());
assertEquals("mock:/tbl/part1/subdir/000000_0",result.get(4).getFileStatus().getPath().toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Mixed bases and deltas: the newest valid base (base_49) wins; older bases
 * and deltas fully covered by it become obsolete; the one delta reaching
 * past the base remains current with its parsed min/max txn ids.
 */
@Test public void testBaseDeltas() throws Exception {
Configuration conf=new Configuration();
MockFileSystem fs=new MockFileSystem(conf,new MockFile("mock:/tbl/part1/base_5/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/base_10/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/base_49/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/delta_025_025/bucket_0",0,new byte[0]),new MockFile("mock:/tbl/part1/delta_029_029/bucket_0",0,new byte[0]),new MockFile("mock:/tbl/part1/delta_025_030/bucket_0",0,new byte[0]),new MockFile("mock:/tbl/part1/delta_050_105/bucket_0",0,new byte[0]),new MockFile("mock:/tbl/part1/delta_90_120/bucket_0",0,new byte[0]));
AcidUtils.Directory dir=AcidUtils.getAcidState(new TestInputOutputFormat.MockPath(fs,"mock:/tbl/part1"),conf,new ValidReadTxnList("100:"));
assertEquals("mock:/tbl/part1/base_49",dir.getBaseDirectory().toString());
// Older bases plus deltas entirely <= 49 are obsolete.
List obsolete=dir.getObsolete();
assertEquals(5,obsolete.size());
assertEquals("mock:/tbl/part1/base_10",obsolete.get(0).getPath().toString());
assertEquals("mock:/tbl/part1/base_5",obsolete.get(1).getPath().toString());
assertEquals("mock:/tbl/part1/delta_025_030",obsolete.get(2).getPath().toString());
assertEquals("mock:/tbl/part1/delta_025_025",obsolete.get(3).getPath().toString());
assertEquals("mock:/tbl/part1/delta_029_029",obsolete.get(4).getPath().toString());
assertEquals(0,dir.getOriginalFiles().size());
// delta_050_105 is the only current delta; delta_90_120 exceeds the
// high-water mark (100) and is dropped.
List deltas=dir.getCurrentDirectories();
assertEquals(1,deltas.size());
AcidUtils.ParsedDelta delt=deltas.get(0);
assertEquals("mock:/tbl/part1/delta_050_105",delt.getPath().toString());
assertEquals(50,delt.getMinTransaction());
assertEquals(105,delt.getMaxTransaction());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Deltas using the 1.3.0 multi-statement naming scheme
 * (delta_min_max_stmtId): with txn 4 open ("100:4"), the delta_4_4_* dirs
 * are excluded, as is anything above the high-water mark of 100.
 * @since 1.3.0
 * @throws Exception
 */
@Test public void deltasWithOpenTxnInRead2() throws Exception {
  Configuration conf=new Configuration();
  MockFileSystem mockFs=new MockFileSystem(conf,
      new MockFile("mock:/tbl/part1/delta_1_1/bucket_0",500,new byte[0]),
      new MockFile("mock:/tbl/part1/delta_2_5/bucket_0",500,new byte[0]),
      new MockFile("mock:/tbl/part1/delta_4_4_1/bucket_0",500,new byte[0]),
      new MockFile("mock:/tbl/part1/delta_4_4_3/bucket_0",500,new byte[0]),
      new MockFile("mock:/tbl/part1/delta_101_101_1/bucket_0",500,new byte[0]));
  Path partitionPath=new MockPath(mockFs,"mock:/tbl/part1");
  AcidUtils.Directory state=AcidUtils.getAcidState(partitionPath,conf,new ValidReadTxnList("100:4"));
  List current=state.getCurrentDirectories();
  assertEquals(2,current.size());
  assertEquals("mock:/tbl/part1/delta_1_1",current.get(0).getPath().toString());
  assertEquals("mock:/tbl/part1/delta_2_5",current.get(1).getPath().toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Original files mixed with deltas: with no base, originals are still
 * listed; single-txn deltas covered by a wider delta become obsolete; the
 * remaining deltas are current, sorted, with parsed txn ranges.
 */
@Test public void testOriginalDeltas() throws Exception {
Configuration conf=new Configuration();
MockFileSystem fs=new MockFileSystem(conf,new MockFile("mock:/tbl/part1/000000_0",500,new byte[0]),new MockFile("mock:/tbl/part1/000001_1",500,new byte[0]),new MockFile("mock:/tbl/part1/000002_0",500,new byte[0]),new MockFile("mock:/tbl/part1/random",500,new byte[0]),new MockFile("mock:/tbl/part1/_done",0,new byte[0]),new MockFile("mock:/tbl/part1/subdir/000000_0",0,new byte[0]),new MockFile("mock:/tbl/part1/delta_025_025/bucket_0",0,new byte[0]),new MockFile("mock:/tbl/part1/delta_029_029/bucket_0",0,new byte[0]),new MockFile("mock:/tbl/part1/delta_025_030/bucket_0",0,new byte[0]),new MockFile("mock:/tbl/part1/delta_050_100/bucket_0",0,new byte[0]),new MockFile("mock:/tbl/part1/delta_101_101/bucket_0",0,new byte[0]));
AcidUtils.Directory dir=AcidUtils.getAcidState(new TestInputOutputFormat.MockPath(fs,"mock:/tbl/part1"),conf,new ValidReadTxnList("100:"));
assertEquals(null,dir.getBaseDirectory());
// delta_025_025 and delta_029_029 are covered by delta_025_030.
List obsolete=dir.getObsolete();
assertEquals(2,obsolete.size());
assertEquals("mock:/tbl/part1/delta_025_025",obsolete.get(0).getPath().toString());
assertEquals("mock:/tbl/part1/delta_029_029",obsolete.get(1).getPath().toString());
// All non-delta files except _done are originals.
List result=dir.getOriginalFiles();
assertEquals(5,result.size());
assertEquals("mock:/tbl/part1/000000_0",result.get(0).getFileStatus().getPath().toString());
assertEquals("mock:/tbl/part1/000001_1",result.get(1).getFileStatus().getPath().toString());
assertEquals("mock:/tbl/part1/000002_0",result.get(2).getFileStatus().getPath().toString());
assertEquals("mock:/tbl/part1/random",result.get(3).getFileStatus().getPath().toString());
assertEquals("mock:/tbl/part1/subdir/000000_0",result.get(4).getFileStatus().getPath().toString());
// delta_101_101 exceeds the high-water mark (100) and is dropped.
List deltas=dir.getCurrentDirectories();
assertEquals(2,deltas.size());
AcidUtils.ParsedDelta delt=deltas.get(0);
assertEquals("mock:/tbl/part1/delta_025_030",delt.getPath().toString());
assertEquals(25,delt.getMinTransaction());
assertEquals(30,delt.getMaxTransaction());
delt=deltas.get(1);
assertEquals("mock:/tbl/part1/delta_050_100",delt.getPath().toString());
assertEquals(50,delt.getMinTransaction());
assertEquals(100,delt.getMaxTransaction());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Once a newer base (base_10) exists, the older base and the pre-ACID
 * original files all become obsolete.
 */
@Test public void testObsoleteOriginals() throws Exception {
  Configuration conf=new Configuration();
  MockFileSystem mockFs=new MockFileSystem(conf,
      new MockFile("mock:/tbl/part1/base_10/bucket_0",500,new byte[0]),
      new MockFile("mock:/tbl/part1/base_5/bucket_0",500,new byte[0]),
      new MockFile("mock:/tbl/part1/000000_0",500,new byte[0]),
      new MockFile("mock:/tbl/part1/000001_1",500,new byte[0]));
  Path partitionPath=new MockPath(mockFs,"/tbl/part1");
  AcidUtils.Directory state=AcidUtils.getAcidState(partitionPath,conf,new ValidReadTxnList("150:"));
  // Old base plus the two originals: three obsolete entries.
  List obsolete=state.getObsolete();
  assertEquals(3,obsolete.size());
  assertEquals("mock:/tbl/part1/base_5",obsolete.get(0).getPath().toString());
  assertEquals("mock:/tbl/part1/base_10",state.getBaseDirectory().toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Base selection with multiple bases present: the reader picks the highest
 * base (here base_200 in both cases), and every other base is obsolete.
 */
@Test public void testBestBase() throws Exception {
Configuration conf=new Configuration();
MockFileSystem fs=new MockFileSystem(conf,new MockFile("mock:/tbl/part1/base_5/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/base_10/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/base_25/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/base_100/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/base_200/bucket_0",500,new byte[0]));
Path part=new MockPath(fs,"/tbl/part1");
AcidUtils.Directory dir=AcidUtils.getAcidState(part,conf,new ValidReadTxnList("150:"));
assertEquals("mock:/tbl/part1/base_200",dir.getBaseDirectory().toString());
// All four non-chosen bases are obsolete (listing order below).
List obsoletes=dir.getObsolete();
assertEquals(4,obsoletes.size());
assertEquals("mock:/tbl/part1/base_10",obsoletes.get(0).getPath().toString());
assertEquals("mock:/tbl/part1/base_100",obsoletes.get(1).getPath().toString());
assertEquals("mock:/tbl/part1/base_25",obsoletes.get(2).getPath().toString());
assertEquals("mock:/tbl/part1/base_5",obsoletes.get(3).getPath().toString());
assertEquals(0,dir.getOriginalFiles().size());
assertEquals(0,dir.getCurrentDirectories().size());
// Even with a lower high-water mark (10:) the same base is chosen.
dir=AcidUtils.getAcidState(part,conf,new ValidReadTxnList("10:"));
assertEquals("mock:/tbl/part1/base_200",dir.getBaseDirectory().toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * For a reader's valid-txn list with txn 4 open ("100:4"), both deltas are
 * still current directories (reads tolerate the open txn inside delta_2_5).
 */
@Test public void deltasWithOpenTxnInRead() throws Exception {
  Configuration conf=new Configuration();
  MockFileSystem mockFs=new MockFileSystem(conf,
      new MockFile("mock:/tbl/part1/delta_1_1/bucket_0",500,new byte[0]),
      new MockFile("mock:/tbl/part1/delta_2_5/bucket_0",500,new byte[0]));
  Path partitionPath=new MockPath(mockFs,"mock:/tbl/part1");
  AcidUtils.Directory state=AcidUtils.getAcidState(partitionPath,conf,new ValidReadTxnList("100:4"));
  List current=state.getCurrentDirectories();
  assertEquals(2,current.size());
  assertEquals("mock:/tbl/part1/delta_1_1",current.get(0).getPath().toString());
  assertEquals("mock:/tbl/part1/delta_2_5",current.get(1).getPath().toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Hive 1.3.0 delta dir naming scheme which supports multi-statement txns
 * (delta_min_max_stmtId). Deltas covered by the wide delta_40_60 become
 * obsolete; the survivors are returned sorted by txn range.
 * @throws Exception
 */
@Test public void testOverlapingDelta2() throws Exception {
Configuration conf=new Configuration();
// Note the mixture of zero-padding widths and statement-id suffixes.
MockFileSystem fs=new MockFileSystem(conf,new MockFile("mock:/tbl/part1/delta_0000063_63_0/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/delta_000062_62_0/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/delta_000062_62_3/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/delta_00061_61_0/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/delta_40_60/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/delta_0060_60_1/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/delta_0060_60_4/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/delta_0060_60_7/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/delta_052_55/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/delta_058_58/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/base_50/bucket_0",500,new byte[0]));
Path part=new MockPath(fs,"mock:/tbl/part1");
AcidUtils.Directory dir=AcidUtils.getAcidState(part,conf,new ValidReadTxnList("100:"));
assertEquals("mock:/tbl/part1/base_50",dir.getBaseDirectory().toString());
// Deltas entirely inside [40,60] are obsolete, including all the
// statement-suffixed delta_0060_60_* directories.
List obsolete=dir.getObsolete();
assertEquals(5,obsolete.size());
assertEquals("mock:/tbl/part1/delta_052_55",obsolete.get(0).getPath().toString());
assertEquals("mock:/tbl/part1/delta_058_58",obsolete.get(1).getPath().toString());
assertEquals("mock:/tbl/part1/delta_0060_60_1",obsolete.get(2).getPath().toString());
assertEquals("mock:/tbl/part1/delta_0060_60_4",obsolete.get(3).getPath().toString());
assertEquals("mock:/tbl/part1/delta_0060_60_7",obsolete.get(4).getPath().toString());
// Current deltas, sorted: both delta_000062_62_* survive since distinct
// statement ids within a txn are kept.
List delts=dir.getCurrentDirectories();
assertEquals(5,delts.size());
assertEquals("mock:/tbl/part1/delta_40_60",delts.get(0).getPath().toString());
assertEquals("mock:/tbl/part1/delta_00061_61_0",delts.get(1).getPath().toString());
assertEquals("mock:/tbl/part1/delta_000062_62_0",delts.get(2).getPath().toString());
assertEquals("mock:/tbl/part1/delta_000062_62_3",delts.get(3).getPath().toString());
assertEquals("mock:/tbl/part1/delta_0000063_63_0",delts.get(4).getPath().toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Verifies AcidUtils filename parsing for both new-style base/bucket paths
 * (base_N/bucket_M) and old-style pre-ACID names (NNNNNN_M).
 */
@Test public void testParsing() throws Exception {
  assertEquals(123,AcidUtils.parseBase(new Path("/tmp/base_000123")));
  Path dir=new Path("/tmp/tbl");
  Configuration conf=new Configuration();
  // New-style base file: carries the max txn id and the bucket id.
  // assertTrue/assertFalse instead of assertEquals(true/false, ...) for
  // clearer intent and failure messages.
  AcidOutputFormat.Options opts=AcidUtils.parseBaseBucketFilename(new Path(dir,"base_567/bucket_123"),conf);
  assertFalse(opts.getOldStyle());
  assertTrue(opts.isWritingBase());
  assertEquals(567,opts.getMaximumTransactionId());
  assertEquals(0,opts.getMinimumTransactionId());
  assertEquals(123,opts.getBucket());
  // Old-style name: only the bucket id is encoded; txn ids default to 0.
  opts=AcidUtils.parseBaseBucketFilename(new Path(dir,"000123_0"),conf);
  assertTrue(opts.getOldStyle());
  assertTrue(opts.isWritingBase());
  assertEquals(123,opts.getBucket());
  assertEquals(0,opts.getMinimumTransactionId());
  assertEquals(0,opts.getMaximumTransactionId());
}
InternalCallVerifier EqualityVerifier
/**
 * Checks AcidUtils.createFilename for old-style bucket files and for
 * new-style base/delta paths, including optional statement-id suffixes.
 */
@Test public void testCreateFilename() throws Exception {
  Path root=new Path("/tmp");
  Configuration conf=new Configuration();
  // Old-style names encode only the bucket number.
  AcidOutputFormat.Options opts=new AcidOutputFormat.Options(conf).setOldStyle(true).bucket(1);
  assertEquals("/tmp/000001_0",AcidUtils.createFilename(root,opts).toString());
  opts.bucket(123);
  assertEquals("/tmp/000123_0",AcidUtils.createFilename(root,opts).toString());
  // New-style base path: zero-padded max txn id plus bucket.
  opts.bucket(23).minimumTransactionId(100).maximumTransactionId(200).writingBase(true).setOldStyle(false);
  assertEquals("/tmp/base_0000200/bucket_00023",AcidUtils.createFilename(root,opts).toString());
  // Delta path: min/max txn ids; statement id defaults to 0, -1 omits it.
  opts.writingBase(false);
  assertEquals("/tmp/delta_0000100_0000200_0000/bucket_00023",AcidUtils.createFilename(root,opts).toString());
  opts.statementId(-1);
  assertEquals("/tmp/delta_0000100_0000200/bucket_00023",AcidUtils.createFilename(root,opts).toString());
  opts.statementId(7);
  assertEquals("/tmp/delta_0000100_0000200_0007/bucket_00023",AcidUtils.createFilename(root,opts).toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Overlapping deltas with the pre-1.3.0 naming scheme: deltas entirely
 * covered by the wide delta_40_60 become obsolete; the rest stay current.
 */
@Test public void testOverlapingDelta() throws Exception {
Configuration conf=new Configuration();
// Mixed zero-padding widths exercise numeric (not lexical) range parsing.
MockFileSystem fs=new MockFileSystem(conf,new MockFile("mock:/tbl/part1/delta_0000063_63/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/delta_000062_62/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/delta_00061_61/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/delta_40_60/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/delta_0060_60/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/delta_052_55/bucket_0",500,new byte[0]),new MockFile("mock:/tbl/part1/base_50/bucket_0",500,new byte[0]));
Path part=new MockPath(fs,"mock:/tbl/part1");
AcidUtils.Directory dir=AcidUtils.getAcidState(part,conf,new ValidReadTxnList("100:"));
assertEquals("mock:/tbl/part1/base_50",dir.getBaseDirectory().toString());
// delta_052_55 and delta_0060_60 are inside [40,60] => obsolete.
List obsolete=dir.getObsolete();
assertEquals(2,obsolete.size());
assertEquals("mock:/tbl/part1/delta_052_55",obsolete.get(0).getPath().toString());
assertEquals("mock:/tbl/part1/delta_0060_60",obsolete.get(1).getPath().toString());
// Remaining deltas, sorted by txn range.
List delts=dir.getCurrentDirectories();
assertEquals(4,delts.size());
assertEquals("mock:/tbl/part1/delta_40_60",delts.get(0).getPath().toString());
assertEquals("mock:/tbl/part1/delta_00061_61",delts.get(1).getPath().toString());
assertEquals("mock:/tbl/part1/delta_000062_62",delts.get(2).getPath().toString());
assertEquals("mock:/tbl/part1/delta_0000063_63",delts.get(3).getPath().toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * The compactor skips deltas with a streaming-ingest side file (flush
 * length) and anything at/above the open txn (4); only delta_1_1 remains.
 */
@Test public void deltasWithOpenTxnsNotInCompact2() throws Exception {
  Configuration conf=new Configuration();
  MockFileSystem mockFs=new MockFileSystem(conf,
      new MockFile("mock:/tbl/part1/delta_1_1/bucket_0",500,new byte[0]),
      new MockFile("mock:/tbl/part1/delta_2_5/bucket_0",500,new byte[0]),
      new MockFile("mock:/tbl/part1/delta_2_5/bucket_0" + AcidUtils.DELTA_SIDE_FILE_SUFFIX,500,new byte[0]),
      new MockFile("mock:/tbl/part1/delta_6_10/bucket_0",500,new byte[0]));
  Path partitionPath=new MockPath(mockFs,"mock:/tbl/part1");
  AcidUtils.Directory state=AcidUtils.getAcidState(partitionPath,conf,new ValidCompactorTxnList("100:4"));
  List current=state.getCurrentDirectories();
  assertEquals(1,current.size());
  assertEquals("mock:/tbl/part1/delta_1_1",current.get(0).getPath().toString());
}
Class: org.apache.hadoop.hive.ql.io.TestHiveInputOutputBuffer InternalCallVerifier EqualityVerifier
/**
 * Tests methods of {@link NonSyncDataInputBuffer}: resetting with and
 * without an explicit start offset, position/length accounting, and a
 * round trip of pseudo-random data.
 * @throws IOException
 */
@Test public void testBaseBuffers() throws IOException {
  final long seed=0x0123456789ABCDEFL;
  final Random rand=new Random();
  rand.setSeed(seed);
  System.out.println("SEED: " + seed);
  NonSyncDataOutputBuffer outBuf=new NonSyncDataOutputBuffer();
  writeJunk(outBuf,rand,seed,1000);
  NonSyncDataInputBuffer inBuf=new NonSyncDataInputBuffer();
  // Round trip using the three-argument reset (explicit start offset 0).
  inBuf.reset(outBuf.getData(),0,outBuf.getLength());
  assertEquals(0,inBuf.getPosition());
  assertEquals(outBuf.getLength(),inBuf.getLength());
  readJunk(inBuf,rand,seed,1000);
  // Same round trip using the two-argument reset.
  outBuf.reset();
  writeJunk(outBuf,rand,seed,1000);
  inBuf.reset(outBuf.getData(),outBuf.getLength());
  assertEquals(0,inBuf.getPosition());
  assertEquals(outBuf.getLength(),inBuf.getLength());
  readJunk(inBuf,rand,seed,1000);
}
Class: org.apache.hadoop.hive.ql.io.TestRCFile APIUtilityVerifier IterativeVerifier BranchVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes two rows to an RCFile (with file metadata key/value pairs), then
 * reads them back, checks the metadata, and deserializes each row through
 * the serde, comparing every field to the expected writable values.
 */
@Test public void testSimpleReadAndWrite() throws IOException, SerDeException {
cleanup();
// Raw column bytes for the two rows; empty bytes / "NULL" map to null fields.
byte[][] record_1={"123".getBytes("UTF-8"),"456".getBytes("UTF-8"),"789".getBytes("UTF-8"),"1000".getBytes("UTF-8"),"5.3".getBytes("UTF-8"),"hive and hadoop".getBytes("UTF-8"),new byte[0],"NULL".getBytes("UTF-8")};
byte[][] record_2={"100".getBytes("UTF-8"),"200".getBytes("UTF-8"),"123".getBytes("UTF-8"),"1000".getBytes("UTF-8"),"5.3".getBytes("UTF-8"),"hive and hadoop".getBytes("UTF-8"),new byte[0],"NULL".getBytes("UTF-8")};
RCFileOutputFormat.setColumnNumber(conf,expectedFieldsData.length);
// Attach two metadata pairs (apple->block, cat->dog) to the file header.
RCFile.Writer writer=new RCFile.Writer(fs,conf,file,null,RCFile.createMetadata(new Text("apple"),new Text("block"),new Text("cat"),new Text("dog")),new DefaultCodec());
BytesRefArrayWritable bytes=new BytesRefArrayWritable(record_1.length);
for (int i=0; i < record_1.length; i++) {
BytesRefWritable cu=new BytesRefWritable(record_1[i],0,record_1[i].length);
bytes.set(i,cu);
}
writer.append(bytes);
bytes.clear();
for (int i=0; i < record_2.length; i++) {
BytesRefWritable cu=new BytesRefWritable(record_2[i],0,record_2[i].length);
bytes.set(i,cu);
}
writer.append(bytes);
writer.close();
// Expected deserialized values; the last two columns deserialize to null.
Object[] expectedRecord_1={new ByteWritable((byte)123),new ShortWritable((short)456),new IntWritable(789),new LongWritable(1000),new DoubleWritable(5.3),new Text("hive and hadoop"),null,null};
Object[] expectedRecord_2={new ByteWritable((byte)100),new ShortWritable((short)200),new IntWritable(123),new LongWritable(1000),new DoubleWritable(5.3),new Text("hive and hadoop"),null,null};
RCFile.Reader reader=new RCFile.Reader(fs,file,conf);
// Metadata is retrievable both via the map and via getMetadataValueOf.
assertEquals(new Text("block"),reader.getMetadata().get(new Text("apple")));
assertEquals(new Text("block"),reader.getMetadataValueOf(new Text("apple")));
assertEquals(new Text("dog"),reader.getMetadataValueOf(new Text("cat")));
LongWritable rowID=new LongWritable();
for (int i=0; i < 2; i++) {
reader.next(rowID);
BytesRefArrayWritable cols=new BytesRefArrayWritable();
reader.getCurrentRow(cols);
cols.resetValid(8);
Object row=serDe.deserialize(cols);
StructObjectInspector oi=(StructObjectInspector)serDe.getObjectInspector();
List extends StructField> fieldRefs=oi.getAllStructFieldRefs();
assertEquals("Field size should be 8",8,fieldRefs.size());
// Compare each field after copying to a standard writable object.
for (int j=0; j < fieldRefs.size(); j++) {
Object fieldData=oi.getStructFieldData(row,fieldRefs.get(j));
Object standardWritableData=ObjectInspectorUtils.copyToStandardObject(fieldData,fieldRefs.get(j).getFieldObjectInspector(),ObjectInspectorCopyOption.WRITABLE);
if (i == 0) {
assertEquals("Field " + i,standardWritableData,expectedRecord_1[j]);
}
else {
assertEquals("Field " + i,standardWritableData,expectedRecord_2[j]);
}
}
}
reader.close();
}
APIUtilityVerifier IterativeVerifier BranchVerifier BooleanVerifier InternalCallVerifier IdentityVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Tests {@link RCFile.Reader#getColumn(int,BytesRefArrayWritable) } method:
 * writes two rows, then reads column-by-column, verifying that the reader
 * allocates a result holder on the first call, reuses the caller-supplied
 * holder afterwards, and returns the exact bytes written per row.
 * @throws IOException
 */
@Test public void testGetColumn() throws IOException {
cleanup();
RCFileOutputFormat.setColumnNumber(conf,expectedFieldsData.length);
RCFile.Writer writer=new RCFile.Writer(fs,conf,file,null,RCFile.createMetadata(new Text("apple"),new Text("block"),new Text("cat"),new Text("dog")),new DefaultCodec());
// Two rows of raw column bytes (8 columns each).
byte[][] record_1={"123".getBytes("UTF-8"),"456".getBytes("UTF-8"),"789".getBytes("UTF-8"),"1000".getBytes("UTF-8"),"5.3".getBytes("UTF-8"),"hive and hadoop".getBytes("UTF-8"),new byte[0],"NULL".getBytes("UTF-8")};
byte[][] record_2={"100".getBytes("UTF-8"),"200".getBytes("UTF-8"),"123".getBytes("UTF-8"),"1000".getBytes("UTF-8"),"5.3".getBytes("UTF-8"),"hive and hadoop".getBytes("UTF-8"),new byte[0],"NULL".getBytes("UTF-8")};
BytesRefArrayWritable bytes=new BytesRefArrayWritable(record_1.length);
for (int i=0; i < record_1.length; i++) {
BytesRefWritable cu=new BytesRefWritable(record_1[i],0,record_1[i].length);
bytes.set(i,cu);
}
writer.append(bytes);
bytes.clear();
for (int i=0; i < record_2.length; i++) {
BytesRefWritable cu=new BytesRefWritable(record_2[i],0,record_2[i].length);
bytes.set(i,cu);
}
writer.append(bytes);
writer.close();
RCFile.Reader reader=new RCFile.Reader(fs,file,conf);
// Advance through both rows; row ids are sequential from 0.
LongWritable rowID=new LongWritable();
assertTrue(reader.next(rowID));
assertEquals(rowID.get(),0L);
assertTrue(reader.next(rowID));
assertEquals(rowID.get(),1L);
BytesRefArrayWritable result=null;
BytesRefWritable brw;
for (int col=0; col < 8; col++) {
BytesRefArrayWritable result2=reader.getColumn(col,result);
if (result == null) {
// First call with a null holder: the reader must allocate one.
assertNotNull(result2);
result=result2;
}
else {
// Subsequent calls must reuse the holder we passed in.
assertSame(result2,result);
}
assertEquals(2,result.size());
// Each row's slice of this column must match the bytes written.
for (int row=0; row < result.size(); row++) {
brw=result.get(row);
int start=brw.getStart();
int len=brw.getLength();
byte[] actualData=Arrays.copyOfRange(brw.getData(),start,start + len);
byte[] expectedData=(row == 0) ? record_1[col] : record_2[col];
assertArrayEquals("col=" + col + " : row="+ row,expectedData,actualData);
}
result.clear();
}
reader.close();
}
IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * A version-0 RCFile checked into test data must still be readable:
 * exactly one known row, then end-of-file.
 */
@Test public void testReadOldFileHeader() throws IOException {
  String[] expected=new String[]{"Tester","Bart","333 X St.","Reno","NV","USA"};
  RCFile.Reader reader=new RCFile.Reader(fs,new Path("src/test/data/rc-file-v0.rc"),conf);
  LongWritable rowID=new LongWritable();
  BytesRefArrayWritable columns=new BytesRefArrayWritable();
  assertTrue("old file reader first row",reader.next(rowID));
  reader.getCurrentRow(columns);
  assertEquals(expected.length,columns.size());
  for (int col=0; col < columns.size(); col++) {
    assertEquals(expected[col],new String(columns.get(col).getBytesCopy()));
  }
  // The file contains a single row.
  assertFalse("old file reader end",reader.next(rowID));
  reader.close();
}
Class: org.apache.hadoop.hive.ql.io.TestStorageFormatDescriptor EqualityVerifier
/**
 * Each storage format descriptor must report its full set of recognized
 * names, including legacy aliases (ORCFILE, PARQUETFILE, AVROFILE).
 */
@Test public void testNames(){
Assert.assertEquals(Sets.newHashSet(IOConstants.TEXTFILE),(new TextFileStorageFormatDescriptor()).getNames());
Assert.assertEquals(Sets.newHashSet(IOConstants.SEQUENCEFILE),(new SequenceFileStorageFormatDescriptor()).getNames());
Assert.assertEquals(Sets.newHashSet(IOConstants.RCFILE),(new RCFileStorageFormatDescriptor()).getNames());
Assert.assertEquals(Sets.newHashSet(IOConstants.ORC,IOConstants.ORCFILE),(new ORCFileStorageFormatDescriptor()).getNames());
Assert.assertEquals(Sets.newHashSet(IOConstants.PARQUET,IOConstants.PARQUETFILE),(new ParquetFileStorageFormatDescriptor()).getNames());
Assert.assertEquals(Sets.newHashSet(IOConstants.AVRO,IOConstants.AVROFILE),(new AvroStorageFormatDescriptor()).getNames());
}
Class: org.apache.hadoop.hive.ql.io.avro.TestAvroGenericRecordReader InternalCallVerifier EqualityVerifier
/**
 * Reading an empty Avro file: next() reports no records and the position
 * stays at 0. The reader is closed even if an assertion fails.
 */
@Test public void emptyFile() throws IOException {
  AvroGenericRecordReader reader=new AvroGenericRecordReader(jobConf,emptyFileSplit,reporter);
  try {
    Assert.assertFalse(reader.next(null,null));
    Assert.assertEquals(0,reader.getPos());
  } finally {
    // Always release the reader; previously a failed assertion leaked it.
    reader.close();
  }
}
Class: org.apache.hadoop.hive.ql.io.orc.TestColumnStatistics APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Merging date column statistics widens min/max to cover both sides, and
 * reset() clears the target's accumulated range before a fresh merge.
 */
@Test public void testDateMerge() throws Exception {
  TypeDescription schema=TypeDescription.createDate();
  ColumnStatisticsImpl target=ColumnStatisticsImpl.create(schema);
  ColumnStatisticsImpl source=ColumnStatisticsImpl.create(schema);
  target.updateDate(new DateWritable(1000));
  target.updateDate(new DateWritable(100));
  source.updateDate(new DateWritable(10));
  source.updateDate(new DateWritable(2000));
  target.merge(source);
  DateColumnStatistics merged=(DateColumnStatistics)target;
  assertEquals(new DateWritable(10).get(),merged.getMinimum());
  assertEquals(new DateWritable(2000).get(),merged.getMaximum());
  // After a reset, only the new updates and the merged source count.
  target.reset();
  target.updateDate(new DateWritable(-10));
  target.updateDate(new DateWritable(10000));
  target.merge(source);
  assertEquals(new DateWritable(-10).get(),merged.getMinimum());
  assertEquals(new DateWritable(10000).get(),merged.getMaximum());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Merging string column statistics: min/max widen lexicographically and the
 * byte sums accumulate; reset() clears the target before a fresh merge.
 */
@Test public void testStringMerge() throws Exception {
  TypeDescription schema=TypeDescription.createString();
  ColumnStatisticsImpl target=ColumnStatisticsImpl.create(schema);
  ColumnStatisticsImpl source=ColumnStatisticsImpl.create(schema);
  target.updateString(new Text("bob"));
  target.updateString(new Text("david"));
  target.updateString(new Text("charles"));
  source.updateString(new Text("anne"));
  // "erin" supplied via the (bytes, offset, length, repetitions) overload:
  // the UTF-8 bytes for 'e','r','i','n' start at offset 6, repeated 5x.
  byte[] erin=new byte[]{0,1,2,3,4,5,101,114,105,110};
  source.updateString(erin,6,4,5);
  assertEquals(24,((StringColumnStatistics)source).getSum());
  target.merge(source);
  StringColumnStatistics merged=(StringColumnStatistics)target;
  assertEquals("anne",merged.getMinimum());
  assertEquals("erin",merged.getMaximum());
  assertEquals(39,merged.getSum());
  // After reset, a wider target range dominates the merged source range.
  target.reset();
  target.updateString(new Text("aaa"));
  target.updateString(new Text("zzz"));
  target.merge(source);
  assertEquals("aaa",merged.getMinimum());
  assertEquals("zzz",merged.getMaximum());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes 20000 rows across four stripes where the string column is null for
 * some row groups and some entire stripes, then checks hasNull() at the
 * file level, per stripe, and verifies FileDump's row-index output against
 * the expected golden file.
 */
@Test public void testHasNull() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector=ObjectInspectorFactory.getReflectionObjectInspector(SimpleStruct.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer writer=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).inspector(inspector).rowIndexStride(1000).stripeSize(10000).bufferSize(10000));
  // Stripe 1 (5 row groups of 1000): RG1, null, RG3, null, null.
  writeSimpleStructRows(writer,1000,"RG1");
  writeSimpleStructRows(writer,1000,null);
  writeSimpleStructRows(writer,1000,"RG3");
  writeSimpleStructRows(writer,1000,null);
  writeSimpleStructRows(writer,1000,null);
  // Stripe 2: all nulls.
  writeSimpleStructRows(writer,5000,null);
  // Stripe 3: no nulls.
  writeSimpleStructRows(writer,5000,"STRIPE-3");
  // Stripe 4: all nulls.
  writeSimpleStructRows(writer,5000,null);
  writer.close();
  Reader reader=OrcFile.createReader(testFilePath,OrcFile.readerOptions(conf).filesystem(fs));
  // File-level stats: 7000 non-null strings out of 20000 rows; only the
  // string column (index 2) contains nulls.
  ColumnStatistics[] stats=reader.getStatistics();
  assertEquals(20000,stats[0].getNumberOfValues());
  assertEquals(20000,stats[1].getNumberOfValues());
  assertEquals(7000,stats[2].getNumberOfValues());
  assertEquals(false,stats[0].hasNull());
  assertEquals(false,stats[1].hasNull());
  assertEquals(true,stats[2].hasNull());
  // Per-stripe hasNull for the string column; columns 0 and 1 never hold
  // nulls in any stripe.
  List stripeStats=reader.getStripeStatistics();
  boolean[] stringColHasNull={true,true,false,true};
  for (int s=0; s < stringColHasNull.length; s++) {
    ColumnStatistics[] cs=((StripeStatistics)stripeStats.get(s)).getColumnStatistics();
    assertEquals(false,cs[0].hasNull());
    assertEquals(false,cs[1].hasNull());
    assertEquals(stringColHasNull[s],cs[2].hasNull());
  }
  // Capture FileDump's row-index output; restore System.out even if the
  // dump throws (previously a failure here leaked the redirected stream
  // into every subsequent test).
  PrintStream origOut=System.out;
  String outputFilename="orc-file-has-null.out";
  FileOutputStream myOut=new FileOutputStream(workDir + File.separator + outputFilename);
  System.setOut(new PrintStream(myOut));
  try {
    FileDump.main(new String[]{testFilePath.toString(),"--rowindex=2"});
    System.out.flush();
  } finally {
    System.setOut(origOut);
  }
  TestFileDump.checkOutput(outputFilename,workDir + File.separator + outputFilename);
}

/**
 * Appends {@code count} SimpleStruct rows whose string field is
 * {@code strValue} (possibly null) to {@code writer}.
 */
private void writeSimpleStructRows(Writer writer,int count,String strValue) throws IOException {
  for (int i=0; i < count; i++) {
    writer.addRow(new SimpleStruct(bytes(1,2,3),strValue));
  }
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Merging double column statistics must widen the min/max range to the
 * union of both collectors, and must still work after a reset().
 */
@Test public void testDoubleMerge() throws Exception {
  TypeDescription schema = TypeDescription.createDouble();

  // Two independent collectors over the same double schema.
  ColumnStatisticsImpl doubleStats = ColumnStatisticsImpl.create(schema);
  ColumnStatisticsImpl otherStats = ColumnStatisticsImpl.create(schema);
  doubleStats.updateDouble(10.0);
  doubleStats.updateDouble(100.0);
  otherStats.updateDouble(1.0);
  otherStats.updateDouble(1000.0);

  // Merge widens [10,100] to the union [1,1000].
  doubleStats.merge(otherStats);
  DoubleColumnStatistics merged = (DoubleColumnStatistics) doubleStats;
  assertEquals(1.0, merged.getMinimum(), 0.001);
  assertEquals(1000.0, merged.getMaximum(), 0.001);

  // After a reset, a fresh range merged with the untouched collector
  // must again cover both ranges.
  doubleStats.reset();
  doubleStats.updateDouble(-10);
  doubleStats.updateDouble(10000);
  doubleStats.merge(otherStats);
  assertEquals(-10, merged.getMinimum(), 0.001);
  assertEquals(10000, merged.getMaximum(), 0.001);
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Merging decimal(38,16) column statistics must widen the min/max range
 * to the union of both collectors, and must still work after a reset().
 */
@Test public void testDecimalMerge() throws Exception {
  TypeDescription schema = TypeDescription.createDecimal().withPrecision(38).withScale(16);

  // Two independent collectors over the same decimal schema.
  ColumnStatisticsImpl decimalStats = ColumnStatisticsImpl.create(schema);
  ColumnStatisticsImpl otherStats = ColumnStatisticsImpl.create(schema);
  decimalStats.updateDecimal(HiveDecimal.create(10));
  decimalStats.updateDecimal(HiveDecimal.create(100));
  otherStats.updateDecimal(HiveDecimal.create(1));
  otherStats.updateDecimal(HiveDecimal.create(1000));

  // Merge widens [10,100] to the union [1,1000].
  decimalStats.merge(otherStats);
  DecimalColumnStatistics merged = (DecimalColumnStatistics) decimalStats;
  assertEquals(1, merged.getMinimum().longValue());
  assertEquals(1000, merged.getMaximum().longValue());

  // Reset, collect a fresh range, and merge the untouched collector again.
  decimalStats.reset();
  decimalStats.updateDecimal(HiveDecimal.create(-10));
  decimalStats.updateDecimal(HiveDecimal.create(10000));
  decimalStats.merge(otherStats);
  assertEquals(-10, merged.getMinimum().longValue());
  assertEquals(10000, merged.getMaximum().longValue());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Merging integer column statistics must widen the min/max range to the
 * union of both collectors, and must still work after a reset().
 */
@Test public void testLongMerge() throws Exception {
  TypeDescription schema = TypeDescription.createInt();
  ColumnStatisticsImpl intStats = ColumnStatisticsImpl.create(schema);
  ColumnStatisticsImpl otherStats = ColumnStatisticsImpl.create(schema);
  // updateInteger(value, repetitions)
  intStats.updateInteger(10, 2);
  otherStats.updateInteger(1, 1);
  otherStats.updateInteger(1000, 1);

  // Merge widens [10,10] to the union [1,1000].
  intStats.merge(otherStats);
  IntegerColumnStatistics merged = (IntegerColumnStatistics) intStats;
  assertEquals(1, merged.getMinimum());
  assertEquals(1000, merged.getMaximum());

  // Start over after a reset and merge the same second collector again.
  intStats.reset();
  intStats.updateInteger(-10, 1);
  intStats.updateInteger(10000, 1);
  intStats.merge(otherStats);
  assertEquals(-10, merged.getMinimum());
  assertEquals(10000, merged.getMaximum());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Merging timestamp column statistics must widen the min/max range to the
 * union of both collectors, and must still work after a reset().
 */
@Test public void testTimestampMerge() throws Exception {
  TypeDescription schema = TypeDescription.createTimestamp();
  ColumnStatisticsImpl tsStats = ColumnStatisticsImpl.create(schema);
  ColumnStatisticsImpl otherStats = ColumnStatisticsImpl.create(schema);
  tsStats.updateTimestamp(new Timestamp(10));
  tsStats.updateTimestamp(new Timestamp(100));
  otherStats.updateTimestamp(new Timestamp(1));
  otherStats.updateTimestamp(new Timestamp(1000));

  // Merge widens the [10,100] window to the union [1,1000] (epoch millis).
  tsStats.merge(otherStats);
  TimestampColumnStatistics merged = (TimestampColumnStatistics) tsStats;
  assertEquals(1, merged.getMinimum().getTime());
  assertEquals(1000, merged.getMaximum().getTime());

  // After a reset, a fresh range merged with the untouched collector
  // must again cover both ranges.
  tsStats.reset();
  tsStats.updateTimestamp(new Timestamp(-10));
  tsStats.updateTimestamp(new Timestamp(10000));
  tsStats.merge(otherStats);
  assertEquals(-10, merged.getMinimum().getTime());
  assertEquals(10000, merged.getMaximum().getTime());
}
Class: org.apache.hadoop.hive.ql.io.orc.TestFileDump APIUtilityVerifier EqualityVerifier
/**
 * Writes two AllTypesRecord rows (covering every supported type) and
 * verifies that {@code FileDump -d} prints them back as the expected
 * one-JSON-object-per-row output.
 */
@Test public void testDataDump() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        AllTypesRecord.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
      100000, CompressionKind.NONE, 10000, 1000);
  // FIX: re-parameterize the map — the raw Map/HashMap here was a stripped
  // generic (`Map<String, String>`), which otherwise triggers unchecked use.
  Map<String, String> m = new HashMap<String, String>(2);
  m.put("k1", "v1");
  writer.addRow(new AllTypesRecord(true, (byte) 10, (short) 100, 1000, 10000L,
      4.0f, 20.0, HiveDecimal.create("4.2222"), new Timestamp(1416967764000L),
      new Date(1416967764000L), "string", new HiveChar("hello", 5),
      new HiveVarchar("hello", 10), m, Arrays.asList(100, 200),
      new AllTypesRecord.Struct(10, "foo")));
  m.clear();
  m.put("k3", "v3");
  writer.addRow(new AllTypesRecord(false, (byte) 20, (short) 200, 2000, 20000L,
      8.0f, 40.0, HiveDecimal.create("2.2222"), new Timestamp(1416967364000L),
      new Date(1411967764000L), "abcd", new HiveChar("world", 5),
      new HiveVarchar("world", 10), m, Arrays.asList(200, 300),
      new AllTypesRecord.Struct(20, "bar")));
  writer.close();
  // Capture FileDump's stdout so it can be compared line by line.
  PrintStream origOut = System.out;
  ByteArrayOutputStream myOut = new ByteArrayOutputStream();
  System.setOut(new PrintStream(myOut));
  FileDump.main(new String[]{testFilePath.toString(), "-d"});
  System.out.flush();
  System.setOut(origOut);
  String[] lines = myOut.toString().split("\n");
  assertEquals("{\"b\":true,\"bt\":10,\"s\":100,\"i\":1000,\"l\":10000,\"f\":4,\"d\":20,\"de\":\"4.2222\",\"t\":\"2014-11-25 18:09:24\",\"dt\":\"2014-11-25\",\"str\":\"string\",\"c\":\"hello \",\"vc\":\"hello\",\"m\":[{\"_key\":\"k1\",\"_value\":\"v1\"}],\"a\":[100,200],\"st\":{\"i\":10,\"s\":\"foo\"}}",lines[0]);
  assertEquals("{\"b\":false,\"bt\":20,\"s\":200,\"i\":2000,\"l\":20000,\"f\":8,\"d\":40,\"de\":\"2.2222\",\"t\":\"2014-11-25 18:02:44\",\"dt\":\"2014-09-28\",\"str\":\"abcd\",\"c\":\"world \",\"vc\":\"world\",\"m\":[{\"_key\":\"k3\",\"_value\":\"v3\"}],\"a\":[200,300],\"st\":{\"i\":20,\"s\":\"bar\"}}",lines[1]);
}
Class: org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat APIUtilityVerifier IterativeVerifier BranchVerifier InternalCallVerifier EqualityVerifier
/**
 * Exercises split generation over a mock 5-stripe ORC file and checks that
 * each split reports the projected (column 0 only) uncompressed data size
 * under three different min/max split-size configurations.
 */
@Test public void testProjectedColumnSize() throws Exception {
  long[] stripeSizes = new long[]{200, 200, 200, 200, 100};
  MockFileSystem fs = new MockFileSystem(conf,
      new MockFile("mock:/a/file", 500, createMockOrcFile(stripeSizes),
          new MockBlock("host1-1", "host1-2", "host1-3"),
          new MockBlock("host2-1", "host0", "host2-3"),
          new MockBlock("host0", "host3-2", "host3-3"),
          new MockBlock("host4-1", "host4-2", "host4-3"),
          new MockBlock("host5-1", "host5-2", "host5-3")));
  // Constrain splits to 200-300 bytes so stripes get grouped two at a time.
  HiveConf.setLongVar(conf, HiveConf.ConfVars.MAPREDMAXSPLITSIZE, 300);
  HiveConf.setLongVar(conf, HiveConf.ConfVars.MAPREDMINSPLITSIZE, 200);
  // Project only column 0.
  conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
  conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
  OrcInputFormat.Context context = new OrcInputFormat.Context(conf);
  OrcInputFormat.SplitGenerator splitter = new OrcInputFormat.SplitGenerator(
      new OrcInputFormat.SplitInfo(context, fs, fs.getFileStatus(new Path("/a/file")),
          null, true, new ArrayList<>(), true, null, null));
  // FIX: declare the result as List<OrcSplit> — the raw List left by the
  // stripped generics does not compile against the OrcSplit uses below.
  List<OrcSplit> results = splitter.call();
  OrcSplit result = results.get(0);
  assertEquals(3, results.size());
  assertEquals(3, result.getStart());
  assertEquals(400, result.getLength());
  assertEquals(167468, result.getProjectedColumnsUncompressedSize());
  result = results.get(1);
  assertEquals(403, result.getStart());
  assertEquals(400, result.getLength());
  assertEquals(167468, result.getProjectedColumnsUncompressedSize());
  result = results.get(2);
  assertEquals(803, result.getStart());
  assertEquals(100, result.getLength());
  assertEquals(41867, result.getProjectedColumnsUncompressedSize());

  // With no min/max constraint every stripe becomes its own split.
  HiveConf.setLongVar(conf, HiveConf.ConfVars.MAPREDMAXSPLITSIZE, 0);
  HiveConf.setLongVar(conf, HiveConf.ConfVars.MAPREDMINSPLITSIZE, 0);
  context = new OrcInputFormat.Context(conf);
  splitter = new OrcInputFormat.SplitGenerator(
      new OrcInputFormat.SplitInfo(context, fs, fs.getFileStatus(new Path("/a/file")),
          null, true, new ArrayList<>(), true, null, null));
  results = splitter.call();
  assertEquals(5, results.size());
  for (int i = 0; i < stripeSizes.length; ++i) {
    assertEquals("checking stripe " + i + " size",
        stripeSizes[i], results.get(i).getLength());
    if (i == stripeSizes.length - 1) {
      assertEquals(41867, results.get(i).getProjectedColumnsUncompressedSize());
    } else {
      assertEquals(83734, results.get(i).getProjectedColumnsUncompressedSize());
    }
  }

  // A huge minimum split size folds the whole file into one split.
  HiveConf.setLongVar(conf, HiveConf.ConfVars.MAPREDMAXSPLITSIZE, 1000);
  HiveConf.setLongVar(conf, HiveConf.ConfVars.MAPREDMINSPLITSIZE, 100000);
  context = new OrcInputFormat.Context(conf);
  splitter = new OrcInputFormat.SplitGenerator(
      new OrcInputFormat.SplitInfo(context, fs, fs.getFileStatus(new Path("/a/file")),
          null, true, new ArrayList<>(), true, null, null));
  results = splitter.call();
  assertEquals(1, results.size());
  result = results.get(0);
  assertEquals(3, result.getStart());
  assertEquals(900, result.getLength());
  assertEquals(376804, result.getProjectedColumnsUncompressedSize());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test public void testEmptyFile() throws Exception {
Properties properties=new Properties();
properties.setProperty("columns","x,y");
properties.setProperty("columns.types","int:int");
HiveOutputFormat,?> outFormat=new OrcOutputFormat();
org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter writer=outFormat.getHiveRecordWriter(conf,testFilePath,MyRow.class,true,properties,Reporter.NULL);
writer.close(true);
SerDe serde=new OrcSerde();
SerDeUtils.initializeSerDe(serde,conf,properties,null);
InputFormat,?> in=new OrcInputFormat();
FileInputFormat.setInputPaths(conf,testFilePath.toString());
InputSplit[] splits=in.getSplits(conf,1);
assertTrue(0 == splits.length);
assertEquals(null,serde.getSerDeStats());
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier
@Test public void testVectorizationWithAcid() throws Exception {
StructObjectInspector inspector=new BigRowInspector();
JobConf conf=createMockExecutionEnvironment(workDir,new Path("mock:///"),"vectorizationAcid",inspector,true,1);
Path partDir=new Path(conf.get("mapred.input.dir"));
OrcRecordUpdater writer=new OrcRecordUpdater(partDir,new AcidOutputFormat.Options(conf).maximumTransactionId(10).writingBase(true).bucket(0).inspector(inspector).finalDestination(partDir));
for (int i=0; i < 100; ++i) {
BigRow row=new BigRow(i);
writer.insert(10,row);
}
WriterImpl baseWriter=(WriterImpl)writer.getWriter();
writer.close(false);
((MockOutputStream)baseWriter.getStream()).setBlocks(new MockBlock("host0","host1"));
HiveInputFormat,?> inputFormat=new HiveInputFormat();
InputSplit[] splits=inputFormat.getSplits(conf,10);
assertEquals(1,splits.length);
conf.set(IOConstants.SCHEMA_EVOLUTION_COLUMNS,BigRow.getColumnNamesProperty());
conf.set(IOConstants.SCHEMA_EVOLUTION_COLUMNS_TYPES,BigRow.getColumnTypesProperty());
HiveConf.setBoolVar(conf,HiveConf.ConfVars.HIVE_TRANSACTIONAL_TABLE_SCAN,true);
org.apache.hadoop.mapred.RecordReader reader=inputFormat.getRecordReader(splits[0],conf,Reporter.NULL);
NullWritable key=reader.createKey();
VectorizedRowBatch value=reader.createValue();
assertEquals(true,reader.next(key,value));
assertEquals(100,value.count());
LongColumnVector booleanColumn=(LongColumnVector)value.cols[0];
LongColumnVector byteColumn=(LongColumnVector)value.cols[1];
LongColumnVector shortColumn=(LongColumnVector)value.cols[2];
LongColumnVector intColumn=(LongColumnVector)value.cols[3];
LongColumnVector longColumn=(LongColumnVector)value.cols[4];
DoubleColumnVector floatColumn=(DoubleColumnVector)value.cols[5];
DoubleColumnVector doubleCoulmn=(DoubleColumnVector)value.cols[6];
BytesColumnVector stringColumn=(BytesColumnVector)value.cols[7];
DecimalColumnVector decimalColumn=(DecimalColumnVector)value.cols[8];
LongColumnVector dateColumn=(LongColumnVector)value.cols[9];
LongColumnVector timestampColumn=(LongColumnVector)value.cols[10];
for (int i=0; i < 100; i++) {
assertEquals("checking boolean " + i,i % 2 == 0 ? 1 : 0,booleanColumn.vector[i]);
assertEquals("checking byte " + i,(byte)i,byteColumn.vector[i]);
assertEquals("checking short " + i,(short)i,shortColumn.vector[i]);
assertEquals("checking int " + i,i,intColumn.vector[i]);
assertEquals("checking long " + i,i,longColumn.vector[i]);
assertEquals("checking float " + i,i,floatColumn.vector[i],0.0001);
assertEquals("checking double " + i,i,doubleCoulmn.vector[i],0.0001);
Text strValue=new Text();
strValue.set(stringColumn.vector[i],stringColumn.start[i],stringColumn.length[i]);
assertEquals("checking string " + i,new Text(Long.toHexString(i)),strValue);
assertEquals("checking decimal " + i,HiveDecimal.create(i),decimalColumn.vector[i].getHiveDecimal());
assertEquals("checking date " + i,i,dateColumn.vector[i]);
long millis=(long)i * MILLIS_IN_DAY;
millis-=LOCAL_TIMEZONE.getOffset(millis);
assertEquals("checking timestamp " + i,millis * 1000000L,timestampColumn.vector[i]);
}
assertEquals(false,reader.next(key,value));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Checks which split strategy the FileGenerator picks: empty files under
 * the default config produce a BISplitStrategy, while populated files with
 * a small max split size produce an ETLSplitStrategy.
 */
@Test public void testFileGenerator() throws Exception {
  // Case 1: five zero-byte files (including hidden "_"/"." names).
  OrcInputFormat.Context context = new OrcInputFormat.Context(conf);
  MockFileSystem fileSystem = new MockFileSystem(conf,
      new MockFile("mock:/a/b/part-00", 1000, new byte[0]),
      new MockFile("mock:/a/b/part-01", 1000, new byte[0]),
      new MockFile("mock:/a/b/_part-02", 1000, new byte[0]),
      new MockFile("mock:/a/b/.part-03", 1000, new byte[0]),
      new MockFile("mock:/a/b/part-04", 1000, new byte[0]));
  OrcInputFormat.FileGenerator generator = new OrcInputFormat.FileGenerator(
      context, fileSystem, new MockPath(fileSystem, "mock:/a/b"), false);
  OrcInputFormat.SplitStrategy strategy = createSplitStrategy(context, generator);
  assertEquals(true, strategy instanceof OrcInputFormat.BISplitStrategy);

  // Case 2: same layout but with 1000 bytes per file and a 500-byte cap.
  conf.set("mapreduce.input.fileinputformat.split.maxsize", "500");
  context = new OrcInputFormat.Context(conf);
  fileSystem = new MockFileSystem(conf,
      new MockFile("mock:/a/b/part-00", 1000, new byte[1000]),
      new MockFile("mock:/a/b/part-01", 1000, new byte[1000]),
      new MockFile("mock:/a/b/_part-02", 1000, new byte[1000]),
      new MockFile("mock:/a/b/.part-03", 1000, new byte[1000]),
      new MockFile("mock:/a/b/part-04", 1000, new byte[1000]));
  generator = new OrcInputFormat.FileGenerator(
      context, fileSystem, new MockPath(fileSystem, "mock:/a/b"), false);
  strategy = createSplitStrategy(context, generator);
  assertEquals(true, strategy instanceof OrcInputFormat.ETLSplitStrategy);
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
@Test public void testInOutFormat() throws Exception {
Properties properties=new Properties();
properties.setProperty("columns","x,y");
properties.setProperty("columns.types","int:int");
StructObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=(StructObjectInspector)ObjectInspectorFactory.getReflectionObjectInspector(MyRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
SerDe serde=new OrcSerde();
HiveOutputFormat,?> outFormat=new OrcOutputFormat();
org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter writer=outFormat.getHiveRecordWriter(conf,testFilePath,MyRow.class,true,properties,Reporter.NULL);
writer.write(serde.serialize(new MyRow(1,2),inspector));
writer.write(serde.serialize(new MyRow(2,2),inspector));
writer.write(serde.serialize(new MyRow(3,2),inspector));
writer.close(true);
serde=new OrcSerde();
SerDeUtils.initializeSerDe(serde,conf,properties,null);
assertEquals(OrcSerde.OrcSerdeRow.class,serde.getSerializedClass());
inspector=(StructObjectInspector)serde.getObjectInspector();
assertEquals("struct",inspector.getTypeName());
InputFormat,?> in=new OrcInputFormat();
FileInputFormat.setInputPaths(conf,testFilePath.toString());
InputSplit[] splits=in.getSplits(conf,1);
assertEquals(1,splits.length);
ArrayList fileList=new ArrayList();
assertEquals(false,((InputFormatChecker)in).validateInput(fs,new HiveConf(),fileList));
fileList.add(fs.getFileStatus(testFilePath));
assertEquals(true,((InputFormatChecker)in).validateInput(fs,new HiveConf(),fileList));
fileList.add(fs.getFileStatus(workDir));
assertEquals(false,((InputFormatChecker)in).validateInput(fs,new HiveConf(),fileList));
conf.set(IOConstants.SCHEMA_EVOLUTION_COLUMNS,MyRow.getColumnNamesProperty());
conf.set(IOConstants.SCHEMA_EVOLUTION_COLUMNS_TYPES,MyRow.getColumnTypesProperty());
org.apache.hadoop.mapred.RecordReader reader=in.getRecordReader(splits[0],conf,Reporter.NULL);
Object key=reader.createKey();
Writable value=(Writable)reader.createValue();
int rowNum=0;
List extends StructField> fields=inspector.getAllStructFieldRefs();
IntObjectInspector intInspector=(IntObjectInspector)fields.get(0).getFieldObjectInspector();
while (reader.next(key,value)) {
assertEquals(++rowNum,intInspector.get(inspector.getStructFieldData(serde.deserialize(value),fields.get(0))));
assertEquals(2,intInspector.get(inspector.getStructFieldData(serde.deserialize(value),fields.get(1))));
}
assertEquals(3,rowNum);
assertEquals(1.0,reader.getProgress(),0.00001);
reader.close();
ColumnProjectionUtils.appendReadColumns(conf,Collections.singletonList(0));
reader=in.getRecordReader(splits[0],conf,Reporter.NULL);
key=reader.createKey();
value=(Writable)reader.createValue();
rowNum=0;
fields=inspector.getAllStructFieldRefs();
while (reader.next(key,value)) {
assertEquals(++rowNum,intInspector.get(inspector.getStructFieldData(value,fields.get(0))));
assertEquals(null,inspector.getStructFieldData(value,fields.get(1)));
}
assertEquals(3,rowNum);
reader.close();
ColumnProjectionUtils.setReadAllColumns(conf);
reader=in.getRecordReader(splits[0],conf,Reporter.NULL);
key=reader.createKey();
value=(Writable)reader.createValue();
rowNum=0;
fields=inspector.getAllStructFieldRefs();
while (reader.next(key,value)) {
assertEquals(++rowNum,intInspector.get(inspector.getStructFieldData(value,fields.get(0))));
assertEquals(2,intInspector.get(inspector.getStructFieldData(serde.deserialize(value),fields.get(1))));
}
assertEquals(3,rowNum);
reader.close();
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
@Test public void testMROutput() throws Exception {
Properties properties=new Properties();
StructObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=(StructObjectInspector)ObjectInspectorFactory.getReflectionObjectInspector(NestedRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
SerDe serde=new OrcSerde();
OutputFormat,?> outFormat=new OrcOutputFormat();
RecordWriter writer=outFormat.getRecordWriter(fs,conf,testFilePath.toString(),Reporter.NULL);
writer.write(NullWritable.get(),serde.serialize(new NestedRow(1,2,3),inspector));
writer.write(NullWritable.get(),serde.serialize(new NestedRow(4,5,6),inspector));
writer.write(NullWritable.get(),serde.serialize(new NestedRow(7,8,9),inspector));
writer.close(Reporter.NULL);
serde=new OrcSerde();
properties.setProperty("columns","z,r");
properties.setProperty("columns.types","int:struct");
SerDeUtils.initializeSerDe(serde,conf,properties,null);
inspector=(StructObjectInspector)serde.getObjectInspector();
InputFormat,?> in=new OrcInputFormat();
FileInputFormat.setInputPaths(conf,testFilePath.toString());
InputSplit[] splits=in.getSplits(conf,1);
assertEquals(1,splits.length);
ColumnProjectionUtils.appendReadColumns(conf,Collections.singletonList(1));
conf.set("columns","z,r");
conf.set("columns.types","int:struct");
org.apache.hadoop.mapred.RecordReader reader=in.getRecordReader(splits[0],conf,Reporter.NULL);
Object key=reader.createKey();
Object value=reader.createValue();
int rowNum=0;
List extends StructField> fields=inspector.getAllStructFieldRefs();
StructObjectInspector inner=(StructObjectInspector)fields.get(1).getFieldObjectInspector();
List extends StructField> inFields=inner.getAllStructFieldRefs();
IntObjectInspector intInspector=(IntObjectInspector)fields.get(0).getFieldObjectInspector();
while (reader.next(key,value)) {
assertEquals(null,inspector.getStructFieldData(value,fields.get(0)));
Object sub=inspector.getStructFieldData(value,fields.get(1));
assertEquals(3 * rowNum + 1,intInspector.get(inner.getStructFieldData(sub,inFields.get(0))));
assertEquals(3 * rowNum + 2,intInspector.get(inner.getStructFieldData(sub,inFields.get(1))));
rowNum+=1;
}
assertEquals(3,rowNum);
reader.close();
}
InternalCallVerifier EqualityVerifier
@Test public void testSplitElimination() throws Exception {
Properties properties=new Properties();
properties.setProperty("columns","z,r");
properties.setProperty("columns.types","int:struct");
StructObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=(StructObjectInspector)ObjectInspectorFactory.getReflectionObjectInspector(NestedRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
SerDe serde=new OrcSerde();
OutputFormat,?> outFormat=new OrcOutputFormat();
conf.setInt("mapred.max.split.size",50);
RecordWriter writer=outFormat.getRecordWriter(fs,conf,testFilePath.toString(),Reporter.NULL);
writer.write(NullWritable.get(),serde.serialize(new NestedRow(1,2,3),inspector));
writer.write(NullWritable.get(),serde.serialize(new NestedRow(4,5,6),inspector));
writer.write(NullWritable.get(),serde.serialize(new NestedRow(7,8,9),inspector));
writer.close(Reporter.NULL);
serde=new OrcSerde();
SearchArgument sarg=SearchArgumentFactory.newBuilder().startAnd().lessThan("z",PredicateLeaf.Type.LONG,new Long(0)).end().build();
conf.set("sarg.pushdown",toKryo(sarg));
conf.set("hive.io.file.readcolumn.names","z,r");
SerDeUtils.initializeSerDe(serde,conf,properties,null);
inspector=(StructObjectInspector)serde.getObjectInspector();
InputFormat,?> in=new OrcInputFormat();
FileInputFormat.setInputPaths(conf,testFilePath.toString());
InputSplit[] splits=in.getSplits(conf,1);
assertEquals(0,splits.length);
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier
/**
* Test vectorization, non-acid, non-combine.
* @throws Exception
*/
@Test public void testVectorization() throws Exception {
StructObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=(StructObjectInspector)ObjectInspectorFactory.getReflectionObjectInspector(MyRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
JobConf conf=createMockExecutionEnvironment(workDir,new Path("mock:///"),"vectorization",inspector,true,1);
Writer writer=OrcFile.createWriter(new Path(conf.get("mapred.input.dir") + "/0_0"),OrcFile.writerOptions(conf).blockPadding(false).bufferSize(1024).inspector(inspector));
for (int i=0; i < 10; ++i) {
writer.addRow(new MyRow(i,2 * i));
}
writer.close();
((MockOutputStream)((WriterImpl)writer).getStream()).setBlocks(new MockBlock("host0","host1"));
HiveInputFormat,?> inputFormat=new HiveInputFormat();
InputSplit[] splits=inputFormat.getSplits(conf,10);
assertEquals(1,splits.length);
org.apache.hadoop.mapred.RecordReader reader=inputFormat.getRecordReader(splits[0],conf,Reporter.NULL);
NullWritable key=reader.createKey();
VectorizedRowBatch value=reader.createValue();
assertEquals(true,reader.next(key,value));
assertEquals(10,value.count());
LongColumnVector col0=(LongColumnVector)value.cols[0];
for (int i=0; i < 10; i++) {
assertEquals("checking " + i,i,col0.vector[i]);
}
assertEquals(false,reader.next(key,value));
}
InternalCallVerifier EqualityVerifier
/**
 * setSearchArgument must map the pushed-down SARG column names onto the
 * included sub-columns of the ACID "row" struct, leaving excluded slots
 * null, and must preserve the parsed predicate leaves.
 */
@Test public void testSetSearchArgument() throws Exception {
  Reader.Options options = new Reader.Options();
  // FIX: parameterize the type list — the raw List left by the stripped
  // generics does not match the typed uses below.
  List<OrcProto.Type> types = new ArrayList<OrcProto.Type>();
  OrcProto.Type.Builder builder = OrcProto.Type.newBuilder();
  // Outer ACID struct: op, otid, bucket, rowid, ctid, row.
  builder.setKind(OrcProto.Type.Kind.STRUCT)
      .addAllFieldNames(Arrays.asList("op", "otid", "bucket", "rowid", "ctid", "row"))
      .addAllSubtypes(Arrays.asList(1, 2, 3, 4, 5, 6));
  types.add(builder.build());
  builder.clear().setKind(OrcProto.Type.Kind.INT);
  types.add(builder.build());
  types.add(builder.build());
  types.add(builder.build());
  types.add(builder.build());
  types.add(builder.build());
  // The nested "row" struct: url, purchase, cost, store.
  builder.clear().setKind(OrcProto.Type.Kind.STRUCT)
      .addAllFieldNames(Arrays.asList("url", "purchase", "cost", "store"))
      .addAllSubtypes(Arrays.asList(7, 8, 9, 10));
  types.add(builder.build());
  builder.clear().setKind(OrcProto.Type.Kind.STRING);
  types.add(builder.build());
  builder.clear().setKind(OrcProto.Type.Kind.INT);
  types.add(builder.build());
  types.add(builder.build());
  types.add(builder.build());
  SearchArgument isNull = SearchArgumentFactory.newBuilder()
      .startAnd().isNull("cost", PredicateLeaf.Type.LONG).end().build();
  conf.set(ConvertAstToSearchArg.SARG_PUSHDOWN, toKryo(isNull));
  conf.set(ColumnProjectionUtils.READ_COLUMN_NAMES_CONF_STR, "url,cost");
  options.include(new boolean[]{true, true, false, true, false});
  OrcInputFormat.setSearchArgument(options, types, conf, false);
  // Only the included columns get their names; the rest stay null.
  String[] colNames = options.getColumnNames();
  assertEquals(null, colNames[0]);
  assertEquals("url", colNames[1]);
  assertEquals(null, colNames[2]);
  assertEquals("cost", colNames[3]);
  assertEquals(null, colNames[4]);
  SearchArgument arg = options.getSearchArgument();
  List<PredicateLeaf> leaves = arg.getLeaves();
  assertEquals("cost", leaves.get(0).getColumnName());
  assertEquals(PredicateLeaf.Operator.IS_NULL, leaves.get(0).getOperator());
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
@Test public void testCombinationInputFormatWithAcid() throws Exception {
StructObjectInspector inspector;
final int PARTITIONS=2;
final int BUCKETS=3;
synchronized (TestOrcFile.class) {
inspector=(StructObjectInspector)ObjectInspectorFactory.getReflectionObjectInspector(MyRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
JobConf conf=createMockExecutionEnvironment(workDir,new Path("mock:///"),"combinationAcid",inspector,false,PARTITIONS);
Path[] partDir=new Path[PARTITIONS];
String[] paths=conf.getStrings("mapred.input.dir");
for (int p=0; p < PARTITIONS; ++p) {
partDir[p]=new Path(paths[p]);
}
OrcRecordUpdater writer=new OrcRecordUpdater(partDir[0],new AcidOutputFormat.Options(conf).maximumTransactionId(10).writingBase(true).bucket(0).inspector(inspector).finalDestination(partDir[0]));
for (int i=0; i < 10; ++i) {
writer.insert(10,new MyRow(i,2 * i));
}
WriterImpl baseWriter=(WriterImpl)writer.getWriter();
writer.close(false);
MockOutputStream outputStream=(MockOutputStream)baseWriter.getStream();
outputStream.setBlocks(new MockBlock("host1","host2"));
writer=new OrcRecordUpdater(partDir[0],new AcidOutputFormat.Options(conf).maximumTransactionId(10).writingBase(true).bucket(1).inspector(inspector).finalDestination(partDir[0]));
for (int i=10; i < 20; ++i) {
writer.insert(10,new MyRow(i,2 * i));
}
WriterImpl deltaWriter=(WriterImpl)writer.getWriter();
outputStream=(MockOutputStream)deltaWriter.getStream();
writer.close(false);
outputStream.setBlocks(new MockBlock("host1","host2"));
for (int bucket=0; bucket < BUCKETS; ++bucket) {
Writer orc=OrcFile.createWriter(new Path(partDir[1],"00000" + bucket + "_0"),OrcFile.writerOptions(conf).blockPadding(false).bufferSize(1024).inspector(inspector));
orc.addRow(new MyRow(1,2));
outputStream=(MockOutputStream)((WriterImpl)orc).getStream();
orc.close();
outputStream.setBlocks(new MockBlock("host3","host4"));
}
conf.setInt(hive_metastoreConstants.BUCKET_COUNT,BUCKETS);
HiveInputFormat,?> inputFormat=new CombineHiveInputFormat();
InputSplit[] splits=inputFormat.getSplits(conf,1);
assertEquals(3,splits.length);
HiveInputFormat.HiveInputSplit split=(HiveInputFormat.HiveInputSplit)splits[0];
assertEquals("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat",split.inputFormatClassName());
assertEquals("mock:/combinationAcid/p=0/base_0000010/bucket_00000",split.getPath().toString());
assertEquals(0,split.getStart());
assertEquals(607,split.getLength());
split=(HiveInputFormat.HiveInputSplit)splits[1];
assertEquals("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat",split.inputFormatClassName());
assertEquals("mock:/combinationAcid/p=0/base_0000010/bucket_00001",split.getPath().toString());
assertEquals(0,split.getStart());
assertEquals(629,split.getLength());
CombineHiveInputFormat.CombineHiveInputSplit combineSplit=(CombineHiveInputFormat.CombineHiveInputSplit)splits[2];
assertEquals(BUCKETS,combineSplit.getNumPaths());
for (int bucket=0; bucket < BUCKETS; ++bucket) {
assertEquals("mock:/combinationAcid/p=1/00000" + bucket + "_0",combineSplit.getPath(bucket).toString());
assertEquals(0,combineSplit.getOffset(bucket));
assertEquals(241,combineSplit.getLength(bucket));
}
String[] hosts=combineSplit.getLocations();
assertEquals(2,hosts.length);
}
EqualityVerifier ExceptionVerifier HybridVerifier
@Test(expected=RuntimeException.class) public void testSplitGenFailure() throws IOException {
Properties properties=new Properties();
HiveOutputFormat,?> outFormat=new OrcOutputFormat();
org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter writer=outFormat.getHiveRecordWriter(conf,testFilePath,MyRow.class,true,properties,Reporter.NULL);
writer.close(true);
InputFormat,?> in=new OrcInputFormat();
fs.setPermission(testFilePath,FsPermission.createImmutable((short)0333));
FileInputFormat.setInputPaths(conf,testFilePath.toString());
try {
in.getSplits(conf,1);
}
catch ( RuntimeException e) {
assertEquals(true,e.getMessage().contains("Permission denied"));
throw e;
}
}
EqualityVerifier
/**
 * Table-driven check of SplitGenerator.getOverlap for disjoint, partially
 * overlapping, and fully contained ranges.
 */
@Test public void testOverlap() throws Exception {
  // Each row: {expected, offset, length, blockOffset, blockLength}.
  long[][] cases = {
      {0, 100, 100, 200, 100},
      {0, 0, 1000, 2000, 100},
      {100, 1000, 1000, 1500, 100},
      {250, 1000, 250, 500, 2000},
      {100, 1000, 1000, 1900, 1000},
      {500, 2000, 1000, 2500, 2000},
  };
  for (long[] c : cases) {
    assertEquals(c[0], OrcInputFormat.SplitGenerator.getOverlap(c[1], c[2], c[3], c[4]));
  }
}
APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
@Test public void testCombinationInputFormat() throws Exception {
StructObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=(StructObjectInspector)ObjectInspectorFactory.getReflectionObjectInspector(MyRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
JobConf conf=createMockExecutionEnvironment(workDir,new Path("mock:///"),"combination",inspector,false,1);
Path partDir=new Path(conf.get("mapred.input.dir"));
Writer writer=OrcFile.createWriter(new Path(partDir,"0_0"),OrcFile.writerOptions(conf).blockPadding(false).bufferSize(1024).inspector(inspector));
for (int i=0; i < 10; ++i) {
writer.addRow(new MyRow(i,2 * i));
}
writer.close();
MockOutputStream outputStream=(MockOutputStream)((WriterImpl)writer).getStream();
outputStream.setBlocks(new MockBlock("host0","host1"));
int length0=outputStream.file.length;
writer=OrcFile.createWriter(new Path(partDir,"1_0"),OrcFile.writerOptions(conf).blockPadding(false).bufferSize(1024).inspector(inspector));
for (int i=10; i < 20; ++i) {
writer.addRow(new MyRow(i,2 * i));
}
writer.close();
outputStream=(MockOutputStream)((WriterImpl)writer).getStream();
outputStream.setBlocks(new MockBlock("host1","host2"));
HiveInputFormat,?> inputFormat=new CombineHiveInputFormat();
InputSplit[] splits=inputFormat.getSplits(conf,1);
assertEquals(1,splits.length);
CombineHiveInputFormat.CombineHiveInputSplit split=(CombineHiveInputFormat.CombineHiveInputSplit)splits[0];
assertEquals(2,split.getNumPaths());
assertEquals(partDir.toString() + "/0_0",split.getPath(0).toString());
assertEquals(partDir.toString() + "/1_0",split.getPath(1).toString());
assertEquals(length0,split.getLength(0));
assertEquals(outputStream.file.length,split.getLength(1));
assertEquals(0,split.getOffset(0));
assertEquals(0,split.getOffset(1));
assertTrue(3 >= split.getLocations().length);
org.apache.hadoop.mapred.RecordReader reader=inputFormat.getRecordReader(split,conf,Reporter.NULL);
CombineHiveKey key=reader.createKey();
OrcStruct value=reader.createValue();
for (int i=0; i < 20; i++) {
assertEquals(true,reader.next(key,value));
assertEquals(i,((IntWritable)value.getFieldValue(0)).get());
}
assertEquals(false,reader.next(key,value));
}
InternalCallVerifier EqualityVerifier
/**
 * getInputPaths must split "mapred.input.dir" on unescaped commas while
 * keeping escaped commas inside a single path.
 */
@Test public void testGetInputPaths() throws Exception {
  // A plain comma-separated list splits into individual paths.
  conf.set("mapred.input.dir", "a,b,c");
  Path[] result = OrcInputFormat.getInputPaths(conf);
  assertArrayEquals(new Path[]{new Path("a"), new Path("b"), new Path("c")}, result);
  // A single absolute path comes back untouched.
  conf.set("mapred.input.dir", "/a/b/c/d/e");
  result = OrcInputFormat.getInputPaths(conf);
  assertArrayEquals(new Path[]{new Path("/a/b/c/d/e")}, result);
  // Escaped commas stay inside one path instead of splitting it.
  conf.set("mapred.input.dir", "/a/b/c\\,d,/e/f\\,g/h");
  result = OrcInputFormat.getInputPaths(conf);
  assertArrayEquals(new Path[]{new Path("/a/b/c,d"), new Path("/e/f,g/h")}, result);
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier IdentityVerifier EqualityVerifier NullVerifier PublicFieldVerifier HybridVerifier
/**
 * Exercises the ETL split-strategy combining logic: directories whose files
 * are compatible (same "original" flag) are merged into one pending
 * ETLSplitStrategy held in the CombinedCtx, while incompatible additions
 * flush the pending strategy back to the caller.
 */
@Test public void testEtlCombinedStrategy() throws Exception {
  conf.set(HiveConf.ConfVars.HIVE_ORC_SPLIT_STRATEGY.varname, "ETL");
  // Huge batch window so combining is never cut off by elapsed time.
  conf.set(HiveConf.ConfVars.HIVE_ORC_SPLIT_DIRECTORY_BATCH_MS.varname, "1000000");
  OrcInputFormat.Context context = new OrcInputFormat.Context(conf);
  MockFileSystem fs = new MockFileSystem(conf,
      new MockFile("mock:/a/1/part-00", 1000, new byte[0]),
      new MockFile("mock:/a/1/part-01", 1000, new byte[0]),
      new MockFile("mock:/a/2/part-00", 1000, new byte[0]),
      new MockFile("mock:/a/2/part-01", 1000, new byte[0]),
      new MockFile("mock:/a/3/base_0/1", 1000, new byte[0]),
      new MockFile("mock:/a/4/base_0/1", 1000, new byte[0]),
      new MockFile("mock:/a/5/base_0/1", 1000, new byte[0]),
      new MockFile("mock:/a/5/delta_0_25/1", 1000, new byte[0]));
  OrcInputFormat.CombinedCtx combineCtx = new OrcInputFormat.CombinedCtx();
  // First (original-file) directory starts a combined strategy; nothing returned yet.
  OrcInputFormat.SplitStrategy<?> ss = createOrCombineStrategy(context, fs, "mock:/a/1", combineCtx);
  assertNull(ss);
  assertTrue(combineCtx.combined instanceof OrcInputFormat.ETLSplitStrategy);
  OrcInputFormat.ETLSplitStrategy etlSs = (OrcInputFormat.ETLSplitStrategy) combineCtx.combined;
  assertEquals(2, etlSs.files.size());
  assertTrue(etlSs.isOriginal);
  assertEquals(1, etlSs.dirs.size());
  // Second compatible directory is folded into the pending strategy.
  ss = createOrCombineStrategy(context, fs, "mock:/a/2", combineCtx);
  assertNull(ss);
  assertTrue(combineCtx.combined instanceof OrcInputFormat.ETLSplitStrategy);
  assertEquals(4, etlSs.files.size());
  assertEquals(2, etlSs.dirs.size());
  // An ACID directory (base_0) is incompatible: the pending combined strategy
  // is flushed back to us and the new directory becomes the pending one.
  ss = createOrCombineStrategy(context, fs, "mock:/a/3", combineCtx);
  assertSame(etlSs, ss);
  assertEquals(4, etlSs.files.size());
  assertEquals(2, etlSs.dirs.size());
  assertTrue(combineCtx.combined instanceof OrcInputFormat.ETLSplitStrategy);
  etlSs = (OrcInputFormat.ETLSplitStrategy) combineCtx.combined;
  assertEquals(1, etlSs.files.size());
  assertFalse(etlSs.isOriginal);
  assertEquals(1, etlSs.dirs.size());
  // An original-file directory cannot combine with the pending ACID one,
  // so its own fresh strategy is returned immediately.
  ss = createOrCombineStrategy(context, fs, "mock:/a/1", combineCtx);
  assertTrue(ss instanceof OrcInputFormat.ETLSplitStrategy);
  assertNotSame(etlSs, ss);
  OrcInputFormat.ETLSplitStrategy rejectedEtlSs = (OrcInputFormat.ETLSplitStrategy) ss;
  assertEquals(2, rejectedEtlSs.files.size());
  assertEquals(1, rejectedEtlSs.dirs.size());
  assertTrue(rejectedEtlSs.isOriginal);
  assertEquals(1, etlSs.files.size());
  assertEquals(1, etlSs.dirs.size());
  // Another compatible ACID base directory is combined into the pending one.
  ss = createOrCombineStrategy(context, fs, "mock:/a/4", combineCtx);
  assertNull(ss);
  assertTrue(combineCtx.combined instanceof OrcInputFormat.ETLSplitStrategy);
  assertEquals(2, etlSs.files.size());
  assertEquals(2, etlSs.dirs.size());
  // A directory with deltas cannot be combined; its strategy is returned.
  ss = createOrCombineStrategy(context, fs, "mock:/a/5", combineCtx);
  assertTrue(ss instanceof OrcInputFormat.ETLSplitStrategy);
  assertNotSame(etlSs, ss);
  assertEquals(2, etlSs.files.size());
  assertEquals(2, etlSs.dirs.size());
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Checks split generation over a mock ORC file: with min/max split sizes of
 * 200/300 the stripes are packed into bounded-length splits, and with the
 * limits disabled (0) every stripe becomes its own split.
 */
@Test public void testSplitGenerator() throws Exception {
  long[] stripeSizes = new long[]{197, 300, 600, 200, 200, 100, 100, 100, 100, 100};
  MockFileSystem fs = new MockFileSystem(conf,
      new MockFile("mock:/a/file", 500, createMockOrcFile(stripeSizes),
          new MockBlock("host1-1", "host1-2", "host1-3"),
          new MockBlock("host2-1", "host0", "host2-3"),
          new MockBlock("host0", "host3-2", "host3-3"),
          new MockBlock("host4-1", "host4-2", "host4-3"),
          new MockBlock("host5-1", "host5-2", "host5-3")));
  HiveConf.setLongVar(conf, HiveConf.ConfVars.MAPREDMAXSPLITSIZE, 300);
  HiveConf.setLongVar(conf, HiveConf.ConfVars.MAPREDMINSPLITSIZE, 200);
  OrcInputFormat.Context context = new OrcInputFormat.Context(conf);
  OrcInputFormat.SplitGenerator splitter = new OrcInputFormat.SplitGenerator(
      new OrcInputFormat.SplitInfo(context, fs,
          AcidUtils.createOriginalObj(null, fs.getFileStatus(new Path("/a/file"))),
          null, true, new ArrayList<>(), true, null, null));
  List<OrcSplit> results = splitter.call();
  // Stripes get merged until a split reaches the max size (300),
  // except a stripe larger than the max forms its own split.
  OrcSplit result = results.get(0);
  assertEquals(3, result.getStart());
  assertEquals(497, result.getLength());
  result = results.get(1);
  assertEquals(500, result.getStart());
  assertEquals(600, result.getLength());
  result = results.get(2);
  assertEquals(1100, result.getStart());
  assertEquals(400, result.getLength());
  result = results.get(3);
  assertEquals(1500, result.getStart());
  assertEquals(300, result.getLength());
  result = results.get(4);
  assertEquals(1800, result.getStart());
  assertEquals(200, result.getLength());
  // With both limits disabled, each stripe maps 1:1 to a split.
  HiveConf.setLongVar(conf, HiveConf.ConfVars.MAPREDMAXSPLITSIZE, 0);
  HiveConf.setLongVar(conf, HiveConf.ConfVars.MAPREDMINSPLITSIZE, 0);
  context = new OrcInputFormat.Context(conf);
  splitter = new OrcInputFormat.SplitGenerator(
      new OrcInputFormat.SplitInfo(context, fs,
          AcidUtils.createOriginalObj(null, fs.getFileStatus(new Path("/a/file"))),
          null, true, new ArrayList<>(), true, null, null));
  results = splitter.call();
  for (int i = 0; i < stripeSizes.length; ++i) {
    assertEquals("checking stripe " + i + " size", stripeSizes[i], results.get(i).getLength());
  }
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void testDefaultTypes() throws Exception {
Properties properties=new Properties();
properties.setProperty("columns","str,str2");
properties.setProperty("columns.types","string:string");
StructObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=(StructObjectInspector)ObjectInspectorFactory.getReflectionObjectInspector(StringRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
SerDe serde=new OrcSerde();
HiveOutputFormat,?> outFormat=new OrcOutputFormat();
org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter writer=outFormat.getHiveRecordWriter(conf,testFilePath,StringRow.class,true,properties,Reporter.NULL);
writer.write(serde.serialize(new StringRow("owen"),inspector));
writer.write(serde.serialize(new StringRow("beth"),inspector));
writer.write(serde.serialize(new StringRow("laurel"),inspector));
writer.write(serde.serialize(new StringRow("hazen"),inspector));
writer.write(serde.serialize(new StringRow("colin"),inspector));
writer.write(serde.serialize(new StringRow("miles"),inspector));
writer.close(true);
serde=new OrcSerde();
SerDeUtils.initializeSerDe(serde,conf,properties,null);
inspector=(StructObjectInspector)serde.getObjectInspector();
assertEquals("struct",inspector.getTypeName());
InputFormat,?> in=new OrcInputFormat();
FileInputFormat.setInputPaths(conf,testFilePath.toString());
InputSplit[] splits=in.getSplits(conf,1);
assertEquals(1,splits.length);
conf.set("columns",StringRow.getColumnNamesProperty());
conf.set("columns.types",StringRow.getColumnTypesProperty());
org.apache.hadoop.mapred.RecordReader reader=in.getRecordReader(splits[0],conf,Reporter.NULL);
Object key=reader.createKey();
Writable value=(Writable)reader.createValue();
List extends StructField> fields=inspector.getAllStructFieldRefs();
StringObjectInspector strInspector=(StringObjectInspector)fields.get(0).getFieldObjectInspector();
assertEquals(true,reader.next(key,value));
assertEquals("owen",strInspector.getPrimitiveJavaObject(inspector.getStructFieldData(value,fields.get(0))));
assertEquals(true,reader.next(key,value));
assertEquals("beth",strInspector.getPrimitiveJavaObject(inspector.getStructFieldData(value,fields.get(0))));
assertEquals(true,reader.next(key,value));
assertEquals("laurel",strInspector.getPrimitiveJavaObject(inspector.getStructFieldData(value,fields.get(0))));
assertEquals(true,reader.next(key,value));
assertEquals("hazen",strInspector.getPrimitiveJavaObject(inspector.getStructFieldData(value,fields.get(0))));
assertEquals(true,reader.next(key,value));
assertEquals("colin",strInspector.getPrimitiveJavaObject(inspector.getStructFieldData(value,fields.get(0))));
assertEquals(true,reader.next(key,value));
assertEquals("miles",strInspector.getPrimitiveJavaObject(inspector.getStructFieldData(value,fields.get(0))));
assertEquals(false,reader.next(key,value));
reader.close();
}
InternalCallVerifier EqualityVerifier
@Test public void testSplitEliminationNullStats() throws Exception {
Properties properties=new Properties();
StructObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=(StructObjectInspector)ObjectInspectorFactory.getReflectionObjectInspector(SimpleRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
SerDe serde=new OrcSerde();
OutputFormat,?> outFormat=new OrcOutputFormat();
conf.setInt("mapred.max.split.size",50);
RecordWriter writer=outFormat.getRecordWriter(fs,conf,testFilePath.toString(),Reporter.NULL);
writer.write(NullWritable.get(),serde.serialize(new SimpleRow(null),inspector));
writer.write(NullWritable.get(),serde.serialize(new SimpleRow(null),inspector));
writer.write(NullWritable.get(),serde.serialize(new SimpleRow(null),inspector));
writer.close(Reporter.NULL);
serde=new OrcSerde();
SearchArgument sarg=SearchArgumentFactory.newBuilder().startAnd().lessThan("z",PredicateLeaf.Type.STRING,new String("foo")).end().build();
conf.set("sarg.pushdown",toKryo(sarg));
conf.set("hive.io.file.readcolumn.names","z");
properties.setProperty("columns","z");
properties.setProperty("columns.types","string");
SerDeUtils.initializeSerDe(serde,conf,properties,null);
inspector=(StructObjectInspector)serde.getObjectInspector();
InputFormat,?> in=new OrcInputFormat();
FileInputFormat.setInputPaths(conf,testFilePath.toString());
InputSplit[] splits=in.getSplits(conf,1);
assertEquals(0,splits.length);
}
InternalCallVerifier EqualityVerifier
/**
 * Verifies SplitGenerator.createSplit location selection: a split within a
 * single block reports that block's hosts, while a split spanning several
 * blocks reports only the host(s) common to the most data ("host0").
 */
@Test public void testAddSplit() throws Exception {
  MockFileSystem fs = new MockFileSystem(conf,
      new MockFile("mock:/a/file", 500, createMockOrcFile(197, 300, 600, 200, 200, 100, 100, 100, 100, 100),
          new MockBlock("host1-1", "host1-2", "host1-3"),
          new MockBlock("host2-1", "host0", "host2-3"),
          new MockBlock("host0", "host3-2", "host3-3"),
          new MockBlock("host4-1", "host4-2", "host4-3"),
          new MockBlock("host5-1", "host5-2", "host5-3")));
  OrcInputFormat.Context context = new OrcInputFormat.Context(conf);
  OrcInputFormat.SplitGenerator splitter = new OrcInputFormat.SplitGenerator(
      new OrcInputFormat.SplitInfo(context, fs,
          AcidUtils.createOriginalObj(null, fs.getFileStatus(new Path("/a/file"))),
          null, true, new ArrayList<>(), true, null, null));
  // Entirely within the first block: all three of its hosts are reported.
  OrcSplit result = splitter.createSplit(0, 200, null);
  assertEquals(0, result.getStart());
  assertEquals(200, result.getLength());
  assertEquals("mock:/a/file", result.getPath().toString());
  String[] locs = result.getLocations();
  assertEquals(3, locs.length);
  assertEquals("host1-1", locs[0]);
  assertEquals("host1-2", locs[1]);
  assertEquals("host1-3", locs[2]);
  // Entirely within the second block.
  result = splitter.createSplit(500, 600, null);
  locs = result.getLocations();
  assertEquals(3, locs.length);
  assertEquals("host2-1", locs[0]);
  assertEquals("host0", locs[1]);
  assertEquals("host2-3", locs[2]);
  // Spans all blocks: only the dominant shared host survives.
  result = splitter.createSplit(0, 2500, null);
  locs = result.getLocations();
  assertEquals(1, locs.length);
  assertEquals("host0", locs[0]);
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier
/**
* Test vectorization, non-acid, non-combine.
* @throws Exception
*/
@Test public void testVectorizationWithBuckets() throws Exception {
StructObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=(StructObjectInspector)ObjectInspectorFactory.getReflectionObjectInspector(MyRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
JobConf conf=createMockExecutionEnvironment(workDir,new Path("mock:///"),"vectorBuckets",inspector,true,1);
Writer writer=OrcFile.createWriter(new Path(conf.get("mapred.input.dir") + "/0_0"),OrcFile.writerOptions(conf).blockPadding(false).bufferSize(1024).inspector(inspector));
for (int i=0; i < 10; ++i) {
writer.addRow(new MyRow(i,2 * i));
}
writer.close();
((MockOutputStream)((WriterImpl)writer).getStream()).setBlocks(new MockBlock("host0","host1"));
conf.setInt(hive_metastoreConstants.BUCKET_COUNT,3);
HiveInputFormat,?> inputFormat=new HiveInputFormat();
InputSplit[] splits=inputFormat.getSplits(conf,10);
assertEquals(1,splits.length);
org.apache.hadoop.mapred.RecordReader reader=inputFormat.getRecordReader(splits[0],conf,Reporter.NULL);
NullWritable key=reader.createKey();
VectorizedRowBatch value=reader.createValue();
assertEquals(true,reader.next(key,value));
assertEquals(10,value.count());
LongColumnVector col0=(LongColumnVector)value.cols[0];
for (int i=0; i < 10; i++) {
assertEquals("checking " + i,i,col0.vector[i]);
}
assertEquals(false,reader.next(key,value));
}
Class: org.apache.hadoop.hive.ql.io.orc.TestNewInputOutputFormat APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Runs a map-only MR job over a known ORC file via OrcNewInputFormat and
 * checks the row count and the textual form of the last row.
 */
@Test public void testNewInputFormat() throws Exception {
  Job job = new Job(conf, "orc test");
  job.setInputFormatClass(OrcNewInputFormat.class);
  job.setJarByClass(TestNewInputOutputFormat.class);
  job.setMapperClass(OrcTestMapper1.class);
  job.setNumReduceTasks(0);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(IntWritable.class);
  FileInputFormat.addInputPath(job, new Path(HiveTestUtils.getFileFromClasspath("orc-file-11-format.orc")));
  Path outputPath = new Path(workDir, "TestOrcFile." + testCaseName.getMethodName() + ".txt");
  localFs.delete(outputPath, true);
  FileOutputFormat.setOutputPath(job, outputPath);
  boolean result = job.waitForCompletion(true);
  assertTrue(result);
  Path outputFilePath = new Path(outputPath, "part-m-00000");
  assertTrue(localFs.exists(outputFilePath));
  BufferedReader reader = new BufferedReader(new InputStreamReader(localFs.open(outputFilePath)));
  int count = 0;
  String line;
  String lastLine = null;
  while ((line = reader.readLine()) != null) {
    count++;
    lastLine = line;
  }
  reader.close();
  // JUnit convention: expected value first.
  assertEquals(7500, count);
  assertEquals("{true, 100, 2048, 65536," + " 9223372036854775807, 2.0, -5.0"
      + ", , bye, {[{1, bye}, {2, sigh}]}, [{100000000, cat},"
      + " {-100000, in}, {1234, hat}],"
      + " {chani={5, chani}, mauddib={1, mauddib}},"
      + " 2000-03-12 15:00:01, 12345678.6547457}", lastLine);
  localFs.delete(outputPath, true);
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Generates random (int, uuid) CSV rows, writes them through a map-only
 * job using OrcNewOutputFormat, then reads the resulting ORC file directly
 * and verifies row count, compression, schema, and the first row's values.
 */
@Test public void testNewOutputFormat() throws Exception {
  int rownum = 1000;
  Path inputPath = new Path(workDir, "TestOrcFile." + testCaseName.getMethodName() + ".txt");
  Path outputPath = new Path(workDir, "TestOrcFile." + testCaseName.getMethodName() + ".orc");
  localFs.delete(outputPath, true);
  PrintWriter pw = new PrintWriter(new OutputStreamWriter(localFs.create(inputPath)));
  Random r = new Random(1000L);
  boolean firstRow = true;
  int firstIntValue = 0;
  String firstStringValue = null;
  for (int i = 0; i < rownum; i++) {
    int intValue = r.nextInt();
    String stringValue = UUID.randomUUID().toString();
    if (firstRow) {
      // Remember the first row so it can be verified after the round trip.
      firstRow = false;
      firstIntValue = intValue;
      firstStringValue = stringValue;
    }
    pw.println(intValue + "," + stringValue);
  }
  pw.close();
  Job job = new Job(conf, "orc test");
  job.setOutputFormatClass(OrcNewOutputFormat.class);
  job.setJarByClass(TestNewInputOutputFormat.class);
  job.setMapperClass(OrcTestMapper2.class);
  job.setNumReduceTasks(0);
  job.setOutputKeyClass(NullWritable.class);
  job.setOutputValueClass(Writable.class);
  FileInputFormat.addInputPath(job, inputPath);
  FileOutputFormat.setOutputPath(job, outputPath);
  boolean result = job.waitForCompletion(true);
  assertTrue(result);
  Path outputFilePath = new Path(outputPath, "part-m-00000");
  assertTrue(localFs.exists(outputFilePath));
  Reader reader = OrcFile.createReader(outputFilePath, OrcFile.readerOptions(conf).filesystem(localFs));
  // assertEquals gives a better failure message than assertTrue(x == y).
  assertEquals(rownum, reader.getNumberOfRows());
  assertEquals(CompressionKind.ZLIB, reader.getCompression());
  StructObjectInspector soi = (StructObjectInspector) reader.getObjectInspector();
  StructTypeInfo ti = (StructTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(soi);
  assertEquals(PrimitiveObjectInspector.PrimitiveCategory.INT,
      ((PrimitiveTypeInfo) ti.getAllStructFieldTypeInfos().get(0)).getPrimitiveCategory());
  assertEquals(PrimitiveObjectInspector.PrimitiveCategory.STRING,
      ((PrimitiveTypeInfo) ti.getAllStructFieldTypeInfos().get(1)).getPrimitiveCategory());
  RecordReader rows = reader.rows();
  Object row = rows.next(null);
  IntWritable intWritable = (IntWritable) soi.getStructFieldData(row, soi.getAllStructFieldRefs().get(0));
  Text text = (Text) soi.getStructFieldData(row, soi.getAllStructFieldRefs().get(1));
  assertEquals(firstIntValue, intWritable.get());
  assertEquals(firstStringValue, text.toString());
  rows.close();
  localFs.delete(outputPath, true);
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Runs a map-only job over an ORC file with column pruning enabled
 * (only column ids 1 and 3 read) and verifies that every other column
 * comes back as null in the first output row.
 */
@Test public void testNewInputFormatPruning() throws Exception {
  conf.set("hive.io.file.read.all.columns", "false");
  conf.set("hive.io.file.readcolumn.ids", "1,3");
  Job job = new Job(conf, "orc test");
  job.setInputFormatClass(OrcNewInputFormat.class);
  job.setJarByClass(TestNewInputOutputFormat.class);
  job.setMapperClass(OrcTestMapper1.class);
  job.setNumReduceTasks(0);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(IntWritable.class);
  FileInputFormat.addInputPath(job, new Path(HiveTestUtils.getFileFromClasspath("orc-file-11-format.orc")));
  Path outputPath = new Path(workDir, "TestOrcFile." + testCaseName.getMethodName() + ".txt");
  localFs.delete(outputPath, true);
  FileOutputFormat.setOutputPath(job, outputPath);
  boolean result = job.waitForCompletion(true);
  assertTrue(result);
  Path outputFilePath = new Path(outputPath, "part-m-00000");
  BufferedReader reader = new BufferedReader(new InputStreamReader(localFs.open(outputFilePath)));
  String line = reader.readLine();
  // Close the reader before deleting the output (resource hygiene).
  reader.close();
  // Only columns 1 and 3 were read; everything else must be null.
  assertEquals("{null, 1, null, 65536, null, null, null, "
      + "null, null, null, null, null, null, null}", line);
  localFs.delete(outputPath, true);
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies that the "hive.exec.orc.default.compress" setting is honored by
 * OrcNewOutputFormat: the produced ORC file must report SNAPPY compression.
 */
@Test public void testNewOutputFormatWithCompression() throws Exception {
  conf.set("hive.exec.orc.default.compress", "SNAPPY");
  Path inputPath = new Path(workDir, "TestOrcFile." + testCaseName.getMethodName() + ".txt");
  Path outputPath = new Path(workDir, "TestOrcFile." + testCaseName.getMethodName() + ".orc");
  localFs.delete(outputPath, true);
  PrintWriter pw = new PrintWriter(new OutputStreamWriter(localFs.create(inputPath)));
  pw.println("1,hello");
  pw.println("2,world");
  pw.close();
  Job job = new Job(conf, "orc test");
  job.setOutputFormatClass(OrcNewOutputFormat.class);
  job.setJarByClass(TestNewInputOutputFormat.class);
  job.setMapperClass(OrcTestMapper2.class);
  job.setNumReduceTasks(0);
  job.setOutputKeyClass(NullWritable.class);
  job.setOutputValueClass(OrcSerdeRow.class);
  FileInputFormat.addInputPath(job, inputPath);
  FileOutputFormat.setOutputPath(job, outputPath);
  boolean result = job.waitForCompletion(true);
  assertTrue(result);
  Path outputFilePath = new Path(outputPath, "part-m-00000");
  Reader reader = OrcFile.createReader(outputFilePath, OrcFile.readerOptions(conf).filesystem(localFs));
  // JUnit convention: expected value first.
  assertEquals(CompressionKind.SNAPPY, reader.getCompression());
  localFs.delete(outputPath, true);
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Runs a word-count style map/reduce job whose reducer emits rows with
 * complex types (int key, word count, list of (word, length) structs, and a
 * word-frequency map) through OrcNewOutputFormat, then reads the ORC file
 * back, converts each row to standard Java objects, and verifies all rows.
 */
@SuppressWarnings("unchecked") @Test public void testNewOutputFormatComplex() throws Exception {
  Path inputPath = new Path(workDir, "TestOrcFile." + testCaseName.getMethodName() + ".txt");
  Path outputPath = new Path(workDir, "TestOrcFile." + testCaseName.getMethodName() + ".orc");
  localFs.delete(outputPath, true);
  PrintWriter pw = new PrintWriter(new OutputStreamWriter(localFs.create(inputPath)));
  pw.println("I have eaten");
  pw.println("the plums");
  pw.println("that were in");
  pw.println("the icebox");
  pw.println("and which");
  pw.println("you were probably");
  pw.println("saving");
  pw.println("for breakfast");
  pw.println("Forgive me");
  pw.println("they were delicious");
  pw.println("so sweet");
  pw.println("and so cold");
  pw.close();
  Job job = new Job(conf, "orc test");
  job.setOutputFormatClass(OrcNewOutputFormat.class);
  job.setJarByClass(TestNewInputOutputFormat.class);
  job.setMapperClass(OrcTestMapper3.class);
  job.setReducerClass(OrcTestReducer3.class);
  job.setMapOutputKeyClass(IntWritable.class);
  job.setMapOutputValueClass(Text.class);
  job.setOutputKeyClass(NullWritable.class);
  job.setOutputValueClass(OrcSerdeRow.class);
  FileInputFormat.addInputPath(job, inputPath);
  FileOutputFormat.setOutputPath(job, outputPath);
  boolean result = job.waitForCompletion(true);
  assertTrue(result);
  Path outputFilePath = new Path(outputPath, "part-r-00000");
  Reader reader = OrcFile.createReader(outputFilePath, OrcFile.readerOptions(conf).filesystem(localFs));
  RecordReader rows = reader.rows();
  // Convert ORC's internal representation to standard Java objects.
  ObjectInspector orcOi = reader.getObjectInspector();
  ObjectInspector stoi = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(OrcTestReducer3.typeInfo);
  ObjectInspectorConverters.Converter converter = ObjectInspectorConverters.getConverter(orcOi, stoi);
  // Row 1: one word of length 6 ("saving").
  Object row = rows.next(null);
  List<?> converted = (List<?>) converter.convert(row);
  assertEquals(1, converted.get(0));
  assertEquals(1, converted.get(1));
  List<?> list = (List<?>) converted.get(2);
  assertEquals(1, list.size());
  assertEquals("saving", ((List<?>) list.get(0)).get(0));
  assertEquals(6, ((List<?>) list.get(0)).get(1));
  Map<?, ?> map = (Map<?, ?>) converted.get(3);
  assertEquals(1, map.size());
  assertEquals(Integer.valueOf(1), map.get("saving"));
  // Row 2: six words; "breakfast" (length 9) sorts first; "the" occurs twice.
  row = rows.next(null);
  converted = (List<?>) converter.convert(row);
  assertEquals(2, converted.get(0));
  assertEquals(6, converted.get(1));
  list = (List<?>) converted.get(2);
  assertEquals(6, list.size());
  assertEquals("breakfast", ((List<?>) list.get(0)).get(0));
  assertEquals(9, ((List<?>) list.get(0)).get(1));
  map = (Map<?, ?>) converted.get(3);
  assertEquals(11, map.size());
  assertEquals(Integer.valueOf(2), map.get("the"));
  // Row 3: five words; "cold" (length 4) first; "were" occurs three times.
  row = rows.next(null);
  converted = (List<?>) converter.convert(row);
  assertEquals(3, converted.get(0));
  assertEquals(5, converted.get(1));
  list = (List<?>) converted.get(2);
  assertEquals(5, list.size());
  assertEquals("cold", ((List<?>) list.get(0)).get(0));
  assertEquals(4, ((List<?>) list.get(0)).get(1));
  map = (Map<?, ?>) converted.get(3);
  assertEquals(13, map.size());
  assertEquals(Integer.valueOf(3), map.get("were"));
  assertFalse(rows.hasNext());
  rows.close();
  localFs.delete(outputPath, true);
}
Class: org.apache.hadoop.hive.ql.io.orc.TestNewIntegerEncoding APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Round-trips values whose deltas overflow a signed long (down to near
 * Long.MIN and up to Long.MAX) and verifies each value is read back intact.
 */
@Test public void testDeltaOverflow3() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  // Uppercase 'L' suffixes: lowercase 'l' is easily misread as '1'.
  long[] inp = new long[]{-4513343538618202711L, -2911390882471569739L, -2, Long.MAX_VALUE};
  List<Long> input = Lists.newArrayList(Longs.asList(inp));
  Writer writer = OrcFile.createWriter(testFilePath, OrcFile.writerOptions(conf)
      .inspector(inspector).stripeSize(100000).compress(CompressionKind.NONE).bufferSize(10000));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
  // Ensure the reader returned exactly as many rows as were written.
  assertEquals(input.size(), idx);
  rows.close();
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes 100k random longs in V0.11 format, seeks to row 55555, and
 * verifies every remaining row matches the written data.
 */
@Test public void testSeek() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  List<Long> input = Lists.newArrayList();
  Random rand = new Random();
  for (int i = 0; i < 100000; i++) {
    input.add((long) rand.nextInt());
  }
  Writer writer = OrcFile.createWriter(testFilePath, OrcFile.writerOptions(conf)
      .inspector(inspector).compress(CompressionKind.NONE).stripeSize(100000)
      .bufferSize(10000).version(OrcFile.Version.V_0_11).encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 55555;
  rows.seekToRow(idx);
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
  // After seeking, exactly the remaining rows must have been read.
  assertEquals(input.size(), idx);
  rows.close();
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Round-trips a small-valued sequence with a single outlier (33) that
 * triggers patched-base encoding, and verifies every value survives.
 */
@Test public void testPatchedBaseNegativeMin4() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  long[] inp = new long[]{13,13,11,8,13,10,10,11,11,14,11,7,13,12,12,11,15,12,12,9,8,10,13,11,8,6,5,6,11,7,15,10,7,6,8,7,9,9,11,33,11,3,7,4,6,10,14,12,5,14,7,6};
  List<Long> input = Lists.newArrayList(Longs.asList(inp));
  Writer writer = OrcFile.createWriter(testFilePath, OrcFile.writerOptions(conf)
      .inspector(inspector).stripeSize(100000).compress(CompressionKind.NONE)
      .bufferSize(10000).encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
  // Ensure the reader returned exactly as many rows as were written.
  assertEquals(input.size(), idx);
  rows.close();
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Round-trips a mixed-magnitude sequence containing Long.MAX_VALUE to
 * exercise patched-base encoding at the extreme upper bound.
 */
@Test public void testPatchedBaseMax3() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  List<Long> input = Lists.newArrayList();
  input.add(371946367L);
  input.add(11963367L);
  input.add(68639400007L);
  input.add(100233367L);
  input.add(6367L);
  input.add(10026367L);
  input.add(3670000L);
  input.add(3602367L);
  input.add(4719226367L);
  input.add(7196367L);
  input.add(444442L);
  input.add(210267L);
  input.add(21033L);
  input.add(160267L);
  input.add(400267L);
  input.add(23634347L);
  input.add(16027L);
  input.add(46026367L);
  input.add(Long.MAX_VALUE);
  input.add(33333L);
  Writer writer = OrcFile.createWriter(testFilePath, OrcFile.writerOptions(conf)
      .inspector(inspector).stripeSize(100000).bufferSize(10000).encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
  // Ensure the reader returned exactly as many rows as were written.
  assertEquals(input.size(), idx);
  rows.close();
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Round-trips a long skewed sequence that includes a negative value (-13),
 * forcing patched-base encoding with a negative minimum.
 */
@Test public void testPatchedBaseNegativeMin() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  long[] inp = new long[]{20,2,3,2,1,3,17,71,35,2,1,139,2,2,3,1783,475,2,1,1,3,1,3,2,32,1,2,3,1,8,30,1,3,414,1,1,135,3,3,1,414,2,1,2,2,594,2,5,6,4,11,1,2,2,1,1,52,4,1,2,7,1,17,334,1,2,1,2,2,6,1,266,1,2,217,2,6,2,13,2,2,1,2,3,5,1,2,1,7244,11813,1,33,2,-13,1,2,3,13,1,92,3,13,5,14,9,141,12,6,15,25,1,1,1,46,2,1,1,141,3,1,1,1,1,2,1,4,34,5,78,8,1,2,2,1,9,10,2,1,4,13,1,5,4,4,19,5,1,1,1,68,33,399,1,1885,25,5,2,4,1,1,2,16,1,2966,3,1,1,25501,1,1,1,66,1,3,8,131,14,5,1,2,2,1,1,8,1,1,2,1,5,9,2,3,112,13,2,2,1,5,10,3,1,1,13,2,3,4,1,3,1,1,2,1,1,2,4,2,207,1,1,2,4,3,3,2,2,16};
  List<Long> input = Lists.newArrayList(Longs.asList(inp));
  Writer writer = OrcFile.createWriter(testFilePath, OrcFile.writerOptions(conf)
      .inspector(inspector).stripeSize(100000).compress(CompressionKind.NONE)
      .bufferSize(10000).encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
  // Ensure the reader returned exactly as many rows as were written.
  assertEquals(input.size(), idx);
  rows.close();
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Same skewed sequence as testPatchedBaseNegativeMin but with the negative
 * outlier replaced by 0, so the minimum is non-negative.
 */
@Test public void testPatchedBaseNegativeMin3() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  long[] inp = new long[]{20,2,3,2,1,3,17,71,35,2,1,139,2,2,3,1783,475,2,1,1,3,1,3,2,32,1,2,3,1,8,30,1,3,414,1,1,135,3,3,1,414,2,1,2,2,594,2,5,6,4,11,1,2,2,1,1,52,4,1,2,7,1,17,334,1,2,1,2,2,6,1,266,1,2,217,2,6,2,13,2,2,1,2,3,5,1,2,1,7244,11813,1,33,2,0,1,2,3,13,1,92,3,13,5,14,9,141,12,6,15,25,1,1,1,46,2,1,1,141,3,1,1,1,1,2,1,4,34,5,78,8,1,2,2,1,9,10,2,1,4,13,1,5,4,4,19,5,1,1,1,68,33,399,1,1885,25,5,2,4,1,1,2,16,1,2966,3,1,1,25501,1,1,1,66,1,3,8,131,14,5,1,2,2,1,1,8,1,1,2,1,5,9,2,3,112,13,2,2,1,5,10,3,1,1,13,2,3,4,1,3,1,1,2,1,1,2,4,2,207,1,1,2,4,3,3,2,2,16};
  List<Long> input = Lists.newArrayList(Longs.asList(inp));
  Writer writer = OrcFile.createWriter(testFilePath, OrcFile.writerOptions(conf)
      .inspector(inspector).stripeSize(100000).compress(CompressionKind.NONE)
      .bufferSize(10000).encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
  // Ensure the reader returned exactly as many rows as were written.
  assertEquals(input.size(), idx);
  rows.close();
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Round-trips 5120 small random values with three Long.MAX_VALUE outliers
 * planted at fixed positions to force patched-base encoding.
 */
@Test public void testPatchedBaseMax2() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  List<Long> input = Lists.newArrayList();
  Random rand = new Random();
  for (int i = 0; i < 5120; i++) {
    input.add((long) rand.nextInt(60));
  }
  // Outliers at positions spread across encoding windows.
  input.set(128, Long.MAX_VALUE);
  input.set(256, Long.MAX_VALUE);
  input.set(511, Long.MAX_VALUE);
  Writer writer = OrcFile.createWriter(testFilePath, OrcFile.writerOptions(conf)
      .inspector(inspector).stripeSize(100000).bufferSize(10000).encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
  // Ensure the reader returned exactly as many rows as were written.
  assertEquals(input.size(), idx);
  rows.close();
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Round-trips 100k fully random int-range values and verifies every value
 * is read back intact.
 */
@Test public void testRandomInt() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  List<Long> input = Lists.newArrayList();
  Random rand = new Random();
  for (int i = 0; i < 100000; i++) {
    input.add((long) rand.nextInt());
  }
  Writer writer = OrcFile.createWriter(testFilePath, OrcFile.writerOptions(conf)
      .inspector(inspector).stripeSize(100000).compress(CompressionKind.NONE)
      .bufferSize(10000).encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
  // Ensure the reader returned exactly as many rows as were written.
  assertEquals(input.size(), idx);
  rows.close();
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Round-trips a short monotonically increasing negative sequence, which
 * should be captured by delta encoding, and verifies the values.
 */
@Test public void testBasicDelta1() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  long[] inp = new long[]{-500, -400, -350, -325, -310};
  List<Long> input = Lists.newArrayList(Longs.asList(inp));
  Writer writer = OrcFile.createWriter(testFilePath, OrcFile.writerOptions(conf)
      .inspector(inspector).stripeSize(100000).compress(CompressionKind.NONE)
      .bufferSize(10000).encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
  // Ensure the reader returned exactly as many rows as were written.
  assertEquals(input.size(), idx);
  rows.close();
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes a single value, Integer.MAX_VALUE as a long, and verifies the
 * ORC round-trip preserves it.
 */
@Test public void testIntegerMax() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  List<Long> input = Lists.newArrayList();
  input.add((long) Integer.MAX_VALUE);
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .compress(CompressionKind.NONE).bufferSize(10000)
          .encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes 5120 small random values with one large outlier (20000) placed
 * at index 0 and verifies the ORC round-trip preserves every value.
 */
@Test public void testPatchedBaseAt0() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  List<Long> input = Lists.newArrayList();
  Random rand = new Random();
  for (int i = 0; i < 5120; i++) {
    input.add((long) rand.nextInt(100));
  }
  // Outlier at position 0.
  input.set(0, 20000L);
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .compress(CompressionKind.NONE).bufferSize(10000)
          .encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes a fixed sequence of mostly-small values containing a negative
 * minimum (-1) and several large outliers, and verifies the ORC
 * round-trip preserves every value.
 */
@Test public void testPatchedBaseNegativeMin2() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  long[] inp = new long[]{20,2,3,2,1,3,17,71,35,2,1,139,2,2,3,1783,475,2,1,1,3,1,3,2,32,1,2,3,1,8,30,1,3,414,1,1,135,3,3,1,414,2,1,2,2,594,2,5,6,4,11,1,2,2,1,1,52,4,1,2,7,1,17,334,1,2,1,2,2,6,1,266,1,2,217,2,6,2,13,2,2,1,2,3,5,1,2,1,7244,11813,1,33,2,-1,1,2,3,13,1,92,3,13,5,14,9,141,12,6,15,25,1,1,1,46,2,1,1,141,3,1,1,1,1,2,1,4,34,5,78,8,1,2,2,1,9,10,2,1,4,13,1,5,4,4,19,5,1,1,1,68,33,399,1,1885,25,5,2,4,1,1,2,16,1,2966,3,1,1,25501,1,1,1,66,1,3,8,131,14,5,1,2,2,1,1,8,1,1,2,1,5,9,2,3,112,13,2,2,1,5,10,3,1,1,13,2,3,4,1,3,1,1,2,1,1,2,4,2,207,1,1,2,4,3,3,2,2,16};
  List<Long> input = Lists.newArrayList(Longs.asList(inp));
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .compress(CompressionKind.NONE).bufferSize(10000)
          .encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes a single value, Long.MIN_VALUE, and verifies the ORC
 * round-trip preserves it.
 */
@Test public void testLongMin() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  List<Long> input = Lists.newArrayList();
  input.add(Long.MIN_VALUE);
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .compress(CompressionKind.NONE).bufferSize(10000)
          .encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes three identical {@code Row(111, 1111L)} records and verifies
 * both struct fields of every row read back as the expected writables.
 */
@Test public void testBasicRow() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Row.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer orcWriter = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .compress(CompressionKind.NONE).bufferSize(10000)
          .encodingStrategy(encodingStrategy));
  for (int i = 0; i < 3; i++) {
    orcWriter.addRow(new Row(111, 1111L));
  }
  orcWriter.close();
  Reader orcReader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rowIter = orcReader.rows();
  while (rowIter.hasNext()) {
    OrcStruct struct = (OrcStruct) rowIter.next(null);
    assertEquals(new IntWritable(111), struct.getFieldValue(0));
    assertEquals(new LongWritable(1111), struct.getFieldValue(1));
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes a single value, Integer.MIN_VALUE as a long, and verifies the
 * ORC round-trip preserves it.
 */
@Test public void testIntegerMin() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  List<Long> input = Lists.newArrayList();
  input.add((long) Integer.MIN_VALUE);
  // NOTE(review): unlike most sibling tests this one uses the default
  // compression (no compress(NONE) call) — presumably intentional.
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .bufferSize(10000).encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes a fixed series of timestamps (mostly clustered years with a
 * few outliers such as 2099 and 1974) wrapped in TSRow structs, then
 * verifies the nanosecond component of each value survives the ORC
 * round-trip.
 */
@Test public void testPatchedBaseTimestamp() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        TSRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .bufferSize(10000).encodingStrategy(encodingStrategy));
  // Same values as before, folded into a table; order matters for the
  // index-based verification below.
  String[] dates = {
      "2099-01-01 00:00:00", "2003-01-01 00:00:00", "1999-01-01 00:00:00",
      "1995-01-01 00:00:00", "2002-01-01 00:00:00", "2010-03-02 00:00:00",
      "2005-01-01 00:00:00", "2006-01-01 00:00:00", "2003-01-01 00:00:00",
      "1996-08-02 00:00:00", "1998-11-02 00:00:00", "2008-10-02 00:00:00",
      "1993-08-02 00:00:00", "2008-01-02 00:00:00", "2007-01-01 00:00:00",
      "2004-01-01 00:00:00", "2008-10-02 00:00:00", "2003-01-01 00:00:00",
      "2004-01-01 00:00:00", "2008-01-01 00:00:00", "2005-01-01 00:00:00",
      "1994-01-01 00:00:00", "2006-01-01 00:00:00", "2004-01-01 00:00:00",
      "2001-01-01 00:00:00", "2000-01-01 00:00:00", "2000-01-01 00:00:00",
      "2002-01-01 00:00:00", "2006-01-01 00:00:00", "2011-01-01 00:00:00",
      "2002-01-01 00:00:00", "2005-01-01 00:00:00", "1974-01-01 00:00:00"};
  List<Timestamp> tslist = Lists.newArrayList();
  for (String date : dates) {
    tslist.add(Timestamp.valueOf(date));
  }
  for (Timestamp ts : tslist) {
    writer.addRow(new TSRow(ts));
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(tslist.get(idx++).getNanos(),
        ((TimestampWritable) ((OrcStruct) row).getFieldValue(0)).getNanos());
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes 5120 small random values with one large outlier (20000) placed
 * at index 1 and verifies the ORC round-trip preserves every value.
 */
@Test public void testPatchedBaseAt1() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  List<Long> input = Lists.newArrayList();
  Random rand = new Random();
  for (int i = 0; i < 5120; i++) {
    input.add((long) rand.nextInt(100));
  }
  // Outlier at position 1.
  input.set(1, 20000L);
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .compress(CompressionKind.NONE).bufferSize(10000)
          .encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes 25 repetitions of a 20-value block of large longs followed by
 * a single Long.MAX_VALUE, and verifies the ORC round-trip preserves
 * every value.
 */
@Test public void testPatchedBaseMax4() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  // Repeated block of values; same sequence as before, folded into an
  // array so the repetition is a nested loop instead of 20 add calls.
  long[] block = {
      371292224226367L, 119622332222267L, 686329400222007L, 100233333222367L,
      636272333322222L, 10202633223267L, 36700222022230L, 36023226224227L,
      47192226364427L, 71963622222447L, 22244444222222L, 21220263327442L,
      21032233332232L, 16026322232227L, 40022262272212L, 23634342227222L,
      16022222222227L, 46026362222227L, 46026362222227L, 33322222222323L};
  List<Long> input = Lists.newArrayList();
  for (int i = 0; i < 25; i++) {
    for (long v : block) {
      input.add(v);
    }
  }
  input.add(Long.MAX_VALUE);
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .bufferSize(10000).encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes a single value, Long.MAX_VALUE, and verifies the ORC
 * round-trip preserves it.
 */
@Test public void testLongMax() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  List<Long> input = Lists.newArrayList();
  input.add(Long.MAX_VALUE);
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .compress(CompressionKind.NONE).bufferSize(10000)
          .encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes a short strictly decreasing positive sequence (delta-friendly
 * pattern) and verifies the ORC round-trip preserves every value.
 */
@Test public void testBasicDelta3() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  long[] inp = new long[]{500, 400, 350, 325, 310};
  List<Long> input = Lists.newArrayList(Longs.asList(inp));
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .compress(CompressionKind.NONE).bufferSize(10000)
          .encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Round-trips 100,000 random full-range longs through an uncompressed
 * ORC file and verifies each is read back unchanged.
 */
@Test public void testRandomLong() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  List<Long> input = Lists.newArrayList();
  Random rand = new Random();
  for (int i = 0; i < 100000; i++) {
    input.add(rand.nextLong());
  }
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .compress(CompressionKind.NONE).bufferSize(10000)
          .encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes 5120 small random values with one large outlier (20000) placed
 * at index 256 and verifies the ORC round-trip preserves every value.
 */
@Test public void testPatchedBaseAt256() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  List<Long> input = Lists.newArrayList();
  Random rand = new Random();
  for (int i = 0; i < 5120; i++) {
    input.add((long) rand.nextInt(100));
  }
  // Outlier at position 256.
  input.set(256, 20000L);
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .bufferSize(10000).encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes values whose consecutive differences overflow a signed long
 * (large positives followed by a value near Long.MIN_VALUE) and
 * verifies the ORC round-trip preserves every value.
 */
@Test public void testDeltaOverflow() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  // Uppercase L suffix to avoid the easily misread lowercase 'l'.
  long[] inp = new long[]{4513343538618202719L, 4513343538618202711L,
      2911390882471569739L, -9181829309989854913L};
  List<Long> input = Lists.newArrayList(Longs.asList(inp));
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .compress(CompressionKind.NONE).bufferSize(10000));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes a short strictly decreasing negative sequence (delta-friendly
 * pattern) and verifies the ORC round-trip preserves every value.
 */
@Test public void testBasicDelta2() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  long[] inp = new long[]{-500, -600, -650, -675, -710};
  List<Long> input = Lists.newArrayList(Longs.asList(inp));
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .compress(CompressionKind.NONE).bufferSize(10000)
          .encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes 5120 small random values with one large outlier (20000) placed
 * at index 255 and verifies the ORC round-trip preserves every value.
 */
@Test public void testPatchedBaseAt255() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  List<Long> input = Lists.newArrayList();
  Random rand = new Random();
  for (int i = 0; i < 5120; i++) {
    input.add((long) rand.nextInt(100));
  }
  // Outlier at position 255.
  input.set(255, 20000L);
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .bufferSize(10000).encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes 5120 small random values with Long.MAX_VALUE placed at index
 * 511 and verifies the ORC round-trip preserves every value.
 */
@Test public void testPatchedBaseMax1() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  List<Long> input = Lists.newArrayList();
  Random rand = new Random();
  for (int i = 0; i < 5120; i++) {
    input.add((long) rand.nextInt(60));
  }
  // Extreme outlier at position 511.
  input.set(511, Long.MAX_VALUE);
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .bufferSize(10000).encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes 5120 small random values with one large outlier (20000) placed
 * at index 510 and verifies the ORC round-trip preserves every value.
 */
@Test public void testPatchedBase510() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  List<Long> input = Lists.newArrayList();
  Random rand = new Random();
  for (int i = 0; i < 5120; i++) {
    input.add((long) rand.nextInt(100));
  }
  // Outlier at position 510.
  input.set(510, 20000L);
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .bufferSize(10000).encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes a short sequence containing two very large negative longs and
 * verifies each value reads back unchanged from the ORC file.
 */
@Test public void testDirectLargeNegatives() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer orcWriter = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .bufferSize(10000).encodingStrategy(encodingStrategy));
  long[] values = {-7486502418706614742L, 0L, 1L, 1L, -5535739865598783616L};
  for (long value : values) {
    orcWriter.addRow(value);
  }
  orcWriter.close();
  Reader orcReader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rowIter = orcReader.rows();
  Object row = null;
  for (long expected : values) {
    row = rowIter.next(row);
    assertEquals(expected, ((LongWritable) row).get());
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes a fixed mixed sequence of small longs (runs, ramps, and a 2000
 * outlier) using the current file version and verifies the ORC
 * round-trip preserves every value.
 */
@Test public void testBasicNew() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  long[] inp = new long[]{1,1,1,1,1,1,1,1,1,1,1,2,3,4,5,6,7,8,9,10,1,1,1,1,1,1,10,9,7,6,5,4,3,2,1,1,1,1,1,2,5,1,3,7,1,9,2,6,3,7,1,9,2,6,3,7,1,9,2,6,3,7,1,9,2,6,3,7,1,9,2,6,2000,2,1,1,1,1,1,3,7,1,9,2,6,1,1,1,1,1};
  List<Long> input = Lists.newArrayList(Longs.asList(inp));
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .compress(CompressionKind.NONE).bufferSize(10000)
          .encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes a sequence spanning the full long range (Long.MAX_VALUE down
 * to Long.MIN_VALUE) so consecutive deltas overflow, and verifies the
 * ORC round-trip preserves every value.
 */
@Test public void testDeltaOverflow2() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  // Uppercase L suffix to avoid the easily misread lowercase 'l'.
  long[] inp = new long[]{Long.MAX_VALUE, 4513343538618202711L,
      2911390882471569739L, Long.MIN_VALUE};
  List<Long> input = Lists.newArrayList(Longs.asList(inp));
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .compress(CompressionKind.NONE).bufferSize(10000));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes a short strictly increasing positive sequence (delta-friendly
 * pattern) and verifies the ORC round-trip preserves every value.
 */
@Test public void testBasicDelta4() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  long[] inp = new long[]{500, 600, 650, 675, 710};
  List<Long> input = Lists.newArrayList(Longs.asList(inp));
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .compress(CompressionKind.NONE).bufferSize(10000)
          .encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes the same mixed small-long sequence as testBasicNew but with
 * the legacy V_0_11 file version, and verifies the ORC round-trip
 * preserves every value.
 */
@Test public void testBasicOld() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  long[] inp = new long[]{1,1,1,1,1,1,1,1,1,1,1,2,3,4,5,6,7,8,9,10,1,1,1,1,1,1,10,9,7,6,5,4,3,2,1,1,1,1,1,2,5,1,3,7,1,9,2,6,3,7,1,9,2,6,3,7,1,9,2,6,3,7,1,9,2,6,3,7,1,9,2,6,2000,2,1,1,1,1,1,3,7,1,9,2,6,1,1,1,1,1};
  List<Long> input = Lists.newArrayList(Longs.asList(inp));
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector)
          .compress(CompressionKind.NONE).version(OrcFile.Version.V_0_11)
          .bufferSize(10000).encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes 5120 small random values with one large outlier (20000) placed
 * at index 511 and verifies the ORC round-trip preserves every value.
 */
@Test public void testPatchedBase511() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  List<Long> input = Lists.newArrayList();
  Random rand = new Random();
  for (int i = 0; i < 5120; i++) {
    input.add((long) rand.nextInt(100));
  }
  // Outlier at position 511.
  input.set(511, 20000L);
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .bufferSize(10000).encodingStrategy(encodingStrategy));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
Class: org.apache.hadoop.hive.ql.io.orc.TestOrcFile APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
@Test public void testStringAndBinaryStatistics() throws Exception {
ObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(SimpleStruct.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
Writer writer=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000).bufferSize(10000));
writer.addRow(new SimpleStruct(bytes(0,1,2,3,4),"foo"));
writer.addRow(new SimpleStruct(bytes(0,1,2,3),"bar"));
writer.addRow(new SimpleStruct(bytes(0,1,2,3,4,5),null));
writer.addRow(new SimpleStruct(null,"hi"));
writer.close();
Reader reader=OrcFile.createReader(testFilePath,OrcFile.readerOptions(conf).filesystem(fs));
TypeDescription schema=writer.getSchema();
assertEquals(2,schema.getMaximumId());
boolean[] expected=new boolean[]{false,false,true};
boolean[] included=OrcUtils.includeColumns("string1",schema);
assertEquals(true,Arrays.equals(expected,included));
expected=new boolean[]{false,false,false};
included=OrcUtils.includeColumns("",schema);
assertEquals(true,Arrays.equals(expected,included));
expected=new boolean[]{false,false,false};
included=OrcUtils.includeColumns(null,schema);
assertEquals(true,Arrays.equals(expected,included));
ColumnStatistics[] stats=reader.getStatistics();
assertEquals(4,stats[0].getNumberOfValues());
assertEquals("count: 4 hasNull: false",stats[0].toString());
assertEquals(3,stats[1].getNumberOfValues());
assertEquals(15,((BinaryColumnStatistics)stats[1]).getSum());
assertEquals("count: 3 hasNull: true sum: 15",stats[1].toString());
assertEquals(3,stats[2].getNumberOfValues());
assertEquals("bar",((StringColumnStatistics)stats[2]).getMinimum());
assertEquals("hi",((StringColumnStatistics)stats[2]).getMaximum());
assertEquals(8,((StringColumnStatistics)stats[2]).getSum());
assertEquals("count: 3 hasNull: true min: bar max: hi sum: 8",stats[2].toString());
StructObjectInspector readerInspector=(StructObjectInspector)reader.getObjectInspector();
assertEquals(ObjectInspector.Category.STRUCT,readerInspector.getCategory());
assertEquals("struct",readerInspector.getTypeName());
List extends StructField> fields=readerInspector.getAllStructFieldRefs();
BinaryObjectInspector bi=(BinaryObjectInspector)readerInspector.getStructFieldRef("bytes1").getFieldObjectInspector();
StringObjectInspector st=(StringObjectInspector)readerInspector.getStructFieldRef("string1").getFieldObjectInspector();
RecordReader rows=reader.rows();
Object row=rows.next(null);
assertNotNull(row);
assertEquals(bytes(0,1,2,3,4),bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,fields.get(0))));
assertEquals("foo",st.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(1))));
assertEquals(true,rows.hasNext());
row=rows.next(row);
assertEquals(bytes(0,1,2,3),bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,fields.get(0))));
assertEquals("bar",st.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(1))));
assertEquals(true,rows.hasNext());
row=rows.next(row);
assertEquals(bytes(0,1,2,3,4,5),bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,fields.get(0))));
assertNull(st.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(1))));
assertEquals(true,rows.hasNext());
row=rows.next(row);
assertNull(bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,fields.get(0))));
assertEquals("hi",st.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(1))));
assertEquals(false,rows.hasNext());
rows.close();
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Writes 2500 InnerStruct rows with a small stripe size through a
 * custom memory manager, then verifies the manager was registered and
 * unregistered, that exactly three short stripes were produced, and
 * that the row count is preserved.
 */
@Test public void testMemoryManagementV12() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        InnerStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  MyMemoryManager memory = new MyMemoryManager(conf, 10000, 0.1);
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector)
          .compress(CompressionKind.NONE).stripeSize(50000).bufferSize(100)
          .rowIndexStride(0).memory(memory).batchSize(100)
          .version(OrcFile.Version.V_0_12));
  // The writer registers itself with the memory manager on creation...
  assertEquals(testFilePath, memory.path);
  for (int row = 0; row < 2500; ++row) {
    writer.addRow(new InnerStruct(row * 300, Integer.toHexString(10 * row)));
  }
  writer.close();
  // ...and unregisters on close.
  assertEquals(null, memory.path);
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  int stripeCount = 0;
  for (StripeInformation stripe : reader.getStripes()) {
    stripeCount += 1;
    assertTrue("stripe " + stripeCount + " is too long at "
        + stripe.getDataLength(), stripe.getDataLength() < 5000);
  }
  assertEquals(3, stripeCount);
  assertEquals(2500, reader.getNumberOfRows());
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Reads and writes a randomly generated Snappy-compressed file. The
 * same fixed seed (12) is used on both the write and read side so the
 * expected values can be regenerated during verification.
 * @throws Exception
 */
@Test public void testSnappy() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        InnerStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(1000)
          .compress(CompressionKind.SNAPPY).bufferSize(100));
  Random rng = new Random(12);
  for (int i = 0; i < 10000; ++i) {
    writer.addRow(new InnerStruct(rng.nextInt(),
        Integer.toHexString(rng.nextInt())));
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  // Replay the identical pseudo-random sequence for verification.
  rng = new Random(12);
  OrcStruct row = null;
  for (int i = 0; i < 10000; ++i) {
    assertEquals(true, rows.hasNext());
    row = (OrcStruct) rows.next(row);
    assertEquals(rng.nextInt(), ((IntWritable) row.getFieldValue(0)).get());
    assertEquals(Integer.toHexString(rng.nextInt()),
        row.getFieldValue(1).toString());
  }
  assertEquals(false, rows.hasNext());
  rows.close();
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes twelve nanosecond-precision timestamps with the 0.11 file version
 * and verifies each one reads back with identical nanos, then checks the
 * trivial (single-column) schema projection.
 * @throws Exception
 */
@Test public void testTimestamp() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(Timestamp.class,
        ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .bufferSize(10000).version(OrcFile.Version.V_0_11));
  // List<Timestamp>: restored element type (it had been stripped from the
  // text; a raw List would not compile in the for-each / getNanos() below).
  List<Timestamp> tslist = Lists.newArrayList();
  tslist.add(Timestamp.valueOf("2037-01-01 00:00:00.000999"));
  tslist.add(Timestamp.valueOf("2003-01-01 00:00:00.000000222"));
  tslist.add(Timestamp.valueOf("1999-01-01 00:00:00.999999999"));
  tslist.add(Timestamp.valueOf("1995-01-01 00:00:00.688888888"));
  tslist.add(Timestamp.valueOf("2002-01-01 00:00:00.1"));
  tslist.add(Timestamp.valueOf("2010-03-02 00:00:00.000009001"));
  tslist.add(Timestamp.valueOf("2005-01-01 00:00:00.000002229"));
  tslist.add(Timestamp.valueOf("2006-01-01 00:00:00.900203003"));
  tslist.add(Timestamp.valueOf("2003-01-01 00:00:00.800000007"));
  tslist.add(Timestamp.valueOf("1996-08-02 00:00:00.723100809"));
  tslist.add(Timestamp.valueOf("1998-11-02 00:00:00.857340643"));
  tslist.add(Timestamp.valueOf("2008-10-02 00:00:00"));
  for (Timestamp ts : tslist) {
    writer.addRow(ts);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows(null);
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(tslist.get(idx++).getNanos(), ((TimestampWritable) row).getNanos());
  }
  // Fix: verify every timestamp was actually read back, and close the
  // reader (the original leaked the RecordReader and never checked count).
  assertEquals(tslist.size(), idx);
  rows.close();
  // Single-column file: max column id is 0 and "" includes nothing.
  assertEquals(0, writer.getSchema().getMaximumId());
  boolean[] expected = new boolean[]{false};
  boolean[] included = OrcUtils.includeColumns("", writer.getSchema());
  assertEquals(true, Arrays.equals(expected, included));
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes 32k deterministic BigRows, reads them back in reverse order via
 * seekToRow, then re-reads a byte sub-range covering stripes 2-3 with only
 * two columns projected. Mirrors testSeek but is intended to exercise the
 * zero-copy read path.
 * @throws Exception
 */
@Test public void testZeroCopySeek() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(BigRow.class,
        ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(200000)
          .bufferSize(65536).rowIndexStride(1000));
  Random rand = new Random(42);
  final int COUNT = 32768;
  long[] intValues = new long[COUNT];
  double[] doubleValues = new double[COUNT];
  String[] stringValues = new String[COUNT];
  BytesWritable[] byteValues = new BytesWritable[COUNT];
  String[] words = new String[128];
  for (int i = 0; i < words.length; ++i) {
    words[i] = Integer.toHexString(rand.nextInt());
  }
  // Longs and strings repeat pairwise so repetition-friendly encodings engage.
  for (int i = 0; i < COUNT / 2; ++i) {
    intValues[2 * i] = rand.nextLong();
    intValues[2 * i + 1] = intValues[2 * i];
    stringValues[2 * i] = words[rand.nextInt(words.length)];
    stringValues[2 * i + 1] = stringValues[2 * i];
  }
  for (int i = 0; i < COUNT; ++i) {
    doubleValues[i] = rand.nextDouble();
    byte[] buf = new byte[20];
    rand.nextBytes(buf);
    byteValues[i] = new BytesWritable(buf);
  }
  for (int i = 0; i < COUNT; ++i) {
    writer.addRow(createRandomRow(intValues, doubleValues, stringValues, byteValues, words, i));
  }
  writer.close();
  writer = null;
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  assertEquals(COUNT, reader.getNumberOfRows());
  // NOTE(review): this local conf shadows the field and is never handed to
  // the reader (created above with the outer conf), so HIVE_ORC_ZEROCOPY
  // appears to have no effect here — confirm intent before relying on it.
  Configuration conf = new Configuration();
  HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_ORC_ZEROCOPY, true);
  RecordReader rows = reader.rows();
  OrcStruct row = null;
  // Read every row backwards and compare field by field against a
  // regenerated expected BigRow.
  for (int i = COUNT - 1; i >= 0; --i) {
    rows.seekToRow(i);
    row = (OrcStruct) rows.next(row);
    BigRow expected = createRandomRow(intValues, doubleValues, stringValues,
        byteValues, words, i);
    assertEquals(expected.boolean1.booleanValue(),
        ((BooleanWritable) row.getFieldValue(0)).get());
    assertEquals(expected.byte1.byteValue(), ((ByteWritable) row.getFieldValue(1)).get());
    assertEquals(expected.short1.shortValue(), ((ShortWritable) row.getFieldValue(2)).get());
    assertEquals(expected.int1.intValue(), ((IntWritable) row.getFieldValue(3)).get());
    assertEquals(expected.long1.longValue(), ((LongWritable) row.getFieldValue(4)).get());
    assertEquals(expected.float1.floatValue(),
        ((FloatWritable) row.getFieldValue(5)).get(), 0.0001);
    assertEquals(expected.double1.doubleValue(),
        ((DoubleWritable) row.getFieldValue(6)).get(), 0.0001);
    assertEquals(expected.bytes1, row.getFieldValue(7));
    assertEquals(expected.string1, row.getFieldValue(8));
    List expectedList = expected.middle.list;
    List actualList = (List) ((OrcStruct) row.getFieldValue(9)).getFieldValue(0);
    compareList(expectedList, actualList);
    compareList(expected.list, (List) row.getFieldValue(10));
  }
  rows.close();
  // Locate stripe boundaries for the sub-range read below.
  // Iterator<StripeInformation>: restored type argument (it had been
  // stripped; a raw Iterator would not compile on the next() assignment).
  Iterator<StripeInformation> stripeIterator = reader.getStripes().iterator();
  long offsetOfStripe2 = 0;
  long offsetOfStripe4 = 0;
  long lastRowOfStripe2 = 0;
  for (int i = 0; i < 5; ++i) {
    StripeInformation stripe = stripeIterator.next();
    if (i < 2) {
      lastRowOfStripe2 += stripe.getNumberOfRows();
    } else if (i == 2) {
      offsetOfStripe2 = stripe.getOffset();
      lastRowOfStripe2 += stripe.getNumberOfRows() - 1;
    } else if (i == 4) {
      offsetOfStripe4 = stripe.getOffset();
    }
  }
  // Project only long1 (column 5) and string1 (column 9), read the byte
  // range of stripes 2-3, and check two rows starting at the last row of
  // stripe 2.
  boolean[] columns = new boolean[reader.getStatistics().length];
  columns[5] = true;
  columns[9] = true;
  rows = reader.rowsOptions(new Reader.Options()
      .range(offsetOfStripe2, offsetOfStripe4 - offsetOfStripe2).include(columns));
  rows.seekToRow(lastRowOfStripe2);
  for (int i = 0; i < 2; ++i) {
    row = (OrcStruct) rows.next(row);
    BigRow expected = createRandomRow(intValues, doubleValues, stringValues,
        byteValues, words, (int) (lastRowOfStripe2 + i));
    assertEquals(expected.long1.longValue(), ((LongWritable) row.getFieldValue(4)).get());
    assertEquals(expected.string1, row.getFieldValue(8));
  }
  rows.close();
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Round-trips 1,080,832 random longs (a size spanning many batches) through
 * a ZLIB-compressed single-column file.
 * @throws Exception
 */
@Test public void testBitPack64Large() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(Long.class,
        ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  int size = 1080832;
  long[] inp = new long[size];
  Random rand = new Random(1234);
  for (int i = 0; i < size; i++) {
    inp[i] = rand.nextLong();
  }
  // List<Long>: restored element type (it had been stripped from the text;
  // a raw List would not compile in the for-each below).
  List<Long> input = Lists.newArrayList(Longs.asList(inp));
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).compress(CompressionKind.ZLIB));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    Assert.assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
  // Fix: verify the file contained every value, and release the reader
  // (the original leaked the RecordReader and never checked the count).
  Assert.assertEquals(size, idx);
  rows.close();
}
APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies SearchArgument predicate pushdown: only row groups whose
 * statistics can satisfy the predicate are read, observed through
 * RecordReader.getRowNumber().
 *
 * int1 of row i is i*300 and the row-index stride is 1000, so predicates
 * on int1 select whole 1000-row groups.
 * @throws Exception
 */
@Test public void testPredicatePushdown() throws Exception {
ObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(InnerStruct.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
// Legacy createWriter overload: 400000 stripe size, no compression,
// 500-byte buffers, row-index stride 1000.
Writer writer=OrcFile.createWriter(fs,testFilePath,conf,inspector,400000L,CompressionKind.NONE,500,1000);
for (int i=0; i < 3500; ++i) {
writer.addRow(new InnerStruct(i * 300,Integer.toHexString(10 * i)));
}
writer.close();
Reader reader=OrcFile.createReader(testFilePath,OrcFile.readerOptions(conf).filesystem(fs));
assertEquals(3500,reader.getNumberOfRows());
// Case 1: AND(NOT(int1 < 300000), int1 < 600000) == rows 1000..1999.
SearchArgument sarg=SearchArgumentFactory.newBuilder().startAnd().startNot().lessThan("int1",PredicateLeaf.Type.LONG,300000L).end().lessThan("int1",PredicateLeaf.Type.LONG,600000L).end().build();
RecordReader rows=reader.rowsOptions(new Reader.Options().range(0L,Long.MAX_VALUE).include(new boolean[]{true,true,true}).searchArgument(sarg,new String[]{null,"int1","string1"}));
// Reading starts at the first matching row group, not at row 0.
assertEquals(1000L,rows.getRowNumber());
OrcStruct row=null;
for (int i=1000; i < 2000; ++i) {
assertTrue(rows.hasNext());
row=(OrcStruct)rows.next(row);
assertEquals(300 * i,((IntWritable)row.getFieldValue(0)).get());
assertEquals(Integer.toHexString(10 * i),row.getFieldValue(1).toString());
}
assertTrue(!rows.hasNext());
assertEquals(3500,rows.getRowNumber());
// Case 2: int1 < 0 matches nothing; the reader skips straight to EOF and
// getRowNumber() reports the end of the file.
sarg=SearchArgumentFactory.newBuilder().startAnd().lessThan("int1",PredicateLeaf.Type.LONG,0L).end().build();
rows=reader.rowsOptions(new Reader.Options().range(0L,Long.MAX_VALUE).include(new boolean[]{true,true,true}).searchArgument(sarg,new String[]{null,"int1","string1"}));
assertEquals(3500L,rows.getRowNumber());
assertTrue(!rows.hasNext());
// Case 3: OR(int1 < 300*100, NOT(int1 < 300*3400)) matches i<100 and
// i>=3400; at row-group granularity that reads groups 0 (rows 0..999) and
// the last group (rows 3000..3499).
sarg=SearchArgumentFactory.newBuilder().startOr().lessThan("int1",PredicateLeaf.Type.LONG,300L * 100).startNot().lessThan("int1",PredicateLeaf.Type.LONG,300L * 3400).end().end().build();
rows=reader.rowsOptions(new Reader.Options().range(0L,Long.MAX_VALUE).include(new boolean[]{true,true,true}).searchArgument(sarg,new String[]{null,"int1","string1"}));
row=null;
for (int i=0; i < 1000; ++i) {
assertTrue(rows.hasNext());
assertEquals(i,rows.getRowNumber());
row=(OrcStruct)rows.next(row);
assertEquals(300 * i,((IntWritable)row.getFieldValue(0)).get());
assertEquals(Integer.toHexString(10 * i),row.getFieldValue(1).toString());
}
// The reader jumps over the non-matching middle rows to the last groups.
for (int i=3000; i < 3500; ++i) {
assertTrue(rows.hasNext());
assertEquals(i,rows.getRowNumber());
row=(OrcStruct)rows.next(row);
assertEquals(300 * i,((IntWritable)row.getFieldValue(0)).get());
assertEquals(Integer.toHexString(10 * i),row.getFieldValue(1).toString());
}
assertTrue(!rows.hasNext());
assertEquals(3500,rows.getRowNumber());
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes 32k deterministic BigRows, reads them all back in reverse order
 * via seekToRow comparing every field, then re-reads a byte sub-range
 * covering stripes 2-3 with only two columns projected.
 * @throws Exception
 */
@Test public void testSeek() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(BigRow.class,
        ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(200000)
          .bufferSize(65536).rowIndexStride(1000));
  Random rand = new Random(42);
  final int COUNT = 32768;
  long[] intValues = new long[COUNT];
  double[] doubleValues = new double[COUNT];
  String[] stringValues = new String[COUNT];
  BytesWritable[] byteValues = new BytesWritable[COUNT];
  String[] words = new String[128];
  for (int i = 0; i < words.length; ++i) {
    words[i] = Integer.toHexString(rand.nextInt());
  }
  // Longs and strings repeat pairwise so repetition-friendly encodings engage.
  for (int i = 0; i < COUNT / 2; ++i) {
    intValues[2 * i] = rand.nextLong();
    intValues[2 * i + 1] = intValues[2 * i];
    stringValues[2 * i] = words[rand.nextInt(words.length)];
    stringValues[2 * i + 1] = stringValues[2 * i];
  }
  for (int i = 0; i < COUNT; ++i) {
    doubleValues[i] = rand.nextDouble();
    byte[] buf = new byte[20];
    rand.nextBytes(buf);
    byteValues[i] = new BytesWritable(buf);
  }
  for (int i = 0; i < COUNT; ++i) {
    writer.addRow(createRandomRow(intValues, doubleValues, stringValues, byteValues, words, i));
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  assertEquals(COUNT, reader.getNumberOfRows());
  RecordReader rows = reader.rows();
  OrcStruct row = null;
  // Read every row backwards and compare field by field against a
  // regenerated expected BigRow.
  for (int i = COUNT - 1; i >= 0; --i) {
    rows.seekToRow(i);
    row = (OrcStruct) rows.next(row);
    BigRow expected = createRandomRow(intValues, doubleValues, stringValues,
        byteValues, words, i);
    assertEquals(expected.boolean1.booleanValue(),
        ((BooleanWritable) row.getFieldValue(0)).get());
    assertEquals(expected.byte1.byteValue(), ((ByteWritable) row.getFieldValue(1)).get());
    assertEquals(expected.short1.shortValue(), ((ShortWritable) row.getFieldValue(2)).get());
    assertEquals(expected.int1.intValue(), ((IntWritable) row.getFieldValue(3)).get());
    assertEquals(expected.long1.longValue(), ((LongWritable) row.getFieldValue(4)).get());
    assertEquals(expected.float1, ((FloatWritable) row.getFieldValue(5)).get(), 0.0001);
    assertEquals(expected.double1, ((DoubleWritable) row.getFieldValue(6)).get(), 0.0001);
    assertEquals(expected.bytes1, row.getFieldValue(7));
    assertEquals(expected.string1, row.getFieldValue(8));
    List expectedList = expected.middle.list;
    List actualList = (List) ((OrcStruct) row.getFieldValue(9)).getFieldValue(0);
    compareList(expectedList, actualList);
    compareList(expected.list, (List) row.getFieldValue(10));
  }
  rows.close();
  // Locate stripe boundaries for the sub-range read below.
  // Iterator<StripeInformation>: restored type argument (it had been
  // stripped; a raw Iterator would not compile on the next() assignment).
  Iterator<StripeInformation> stripeIterator = reader.getStripes().iterator();
  long offsetOfStripe2 = 0;
  long offsetOfStripe4 = 0;
  long lastRowOfStripe2 = 0;
  for (int i = 0; i < 5; ++i) {
    StripeInformation stripe = stripeIterator.next();
    if (i < 2) {
      lastRowOfStripe2 += stripe.getNumberOfRows();
    } else if (i == 2) {
      offsetOfStripe2 = stripe.getOffset();
      lastRowOfStripe2 += stripe.getNumberOfRows() - 1;
    } else if (i == 4) {
      offsetOfStripe4 = stripe.getOffset();
    }
  }
  // Project only long1 (column 5) and string1 (column 9), read the byte
  // range of stripes 2-3, and check two rows starting at the last row of
  // stripe 2.
  boolean[] columns = new boolean[reader.getStatistics().length];
  columns[5] = true;
  columns[9] = true;
  rows = reader.rowsOptions(new Reader.Options()
      .range(offsetOfStripe2, offsetOfStripe4 - offsetOfStripe2).include(columns));
  rows.seekToRow(lastRowOfStripe2);
  for (int i = 0; i < 2; ++i) {
    row = (OrcStruct) rows.next(row);
    BigRow expected = createRandomRow(intValues, doubleValues, stringValues,
        byteValues, words, (int) (lastRowOfStripe2 + i));
    assertEquals(expected.long1.longValue(), ((LongWritable) row.getFieldValue(4)).get());
    assertEquals(expected.string1, row.getFieldValue(8));
  }
  rows.close();
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Closing a writer without adding any rows must still produce a valid,
 * readable file: zero rows, no metadata, no stripes, 3-byte content.
 * @throws Exception
 */
@Test public void emptyFile() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(BigRow.class,
        ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  // Write nothing at all.
  Writer emptyWriter = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(1000)
          .compress(CompressionKind.NONE).bufferSize(100));
  emptyWriter.close();
  // The resulting file must open cleanly and report an empty state.
  Reader result = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  assertEquals(false, result.rows().hasNext());
  assertEquals(CompressionKind.NONE, result.getCompression());
  assertEquals(0, result.getNumberOfRows());
  assertEquals(0, result.getCompressionSize());
  assertEquals(false, result.getMetadataKeys().iterator().hasNext());
  assertEquals(3, result.getContentLength());
  assertEquals(false, result.getStripes().iterator().hasNext());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Exercises user metadata: later addUserMetadata calls for the same key
 * clobber earlier values (even after rows are written), a large binary
 * value round-trips intact, reading an unknown key throws
 * IllegalArgumentException, and exactly the written keys are listed.
 * @throws Exception
 */
@Test public void metaData() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(BigRow.class,
        ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(1000)
          .compress(CompressionKind.NONE).bufferSize(100));
  writer.addUserMetadata("my.meta", byteBuf(1, 2, 3, 4, 5, 6, 7, -1, -2, 127, -128));
  writer.addUserMetadata("clobber", byteBuf(1, 2, 3));
  writer.addUserMetadata("clobber", byteBuf(4, 3, 2, 1));
  // A 40k random buffer exercises large metadata values.
  ByteBuffer bigBuf = ByteBuffer.allocate(40000);
  Random random = new Random(0);
  random.nextBytes(bigBuf.array());
  writer.addUserMetadata("big", bigBuf);
  bigBuf.position(0);
  writer.addRow(new BigRow(true, (byte) 127, (short) 1024, 42, 42L * 1024 * 1024 * 1024,
      (float) 3.1415, -2.713, null, null, null, null, null));
  // The final write for "clobber" is the one that must survive.
  writer.addUserMetadata("clobber", byteBuf(5, 7, 11, 13, 17, 19));
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  assertEquals(byteBuf(5, 7, 11, 13, 17, 19), reader.getMetadataValue("clobber"));
  assertEquals(byteBuf(1, 2, 3, 4, 5, 6, 7, -1, -2, 127, -128),
      reader.getMetadataValue("my.meta"));
  assertEquals(bigBuf, reader.getMetadataValue("big"));
  try {
    reader.getMetadataValue("unknown");
    assertTrue(false);
  } catch (IllegalArgumentException expectedFailure) {
    // expected: the key was never written
  }
  // Exactly the three known keys must be listed, in any order.
  int matched = 0;
  for (String key : reader.getMetadataKeys()) {
    boolean known = "my.meta".equals(key) || "clobber".equals(key) || "big".equals(key);
    if (!known) {
      throw new IllegalArgumentException("unknown key " + key);
    }
    matched += 1;
  }
  assertEquals(3, matched);
  int numStripes = reader.getStripeStatistics().size();
  assertEquals(1, numStripes);
}
APIUtilityVerifier BranchVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
@Test public void test1() throws Exception {
ObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(BigRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
Writer writer=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000).bufferSize(10000));
writer.addRow(new BigRow(false,(byte)1,(short)1024,65536,Long.MAX_VALUE,(float)1.0,-15.0,bytes(0,1,2,3,4),"hi",new MiddleStruct(inner(1,"bye"),inner(2,"sigh")),list(inner(3,"good"),inner(4,"bad")),map()));
writer.addRow(new BigRow(true,(byte)100,(short)2048,65536,Long.MAX_VALUE,(float)2.0,-5.0,bytes(),"bye",new MiddleStruct(inner(1,"bye"),inner(2,"sigh")),list(inner(100000000,"cat"),inner(-100000,"in"),inner(1234,"hat")),map(inner(5,"chani"),inner(1,"mauddib"))));
writer.close();
Reader reader=OrcFile.createReader(testFilePath,OrcFile.readerOptions(conf).filesystem(fs));
TypeDescription schema=writer.getSchema();
assertEquals(23,schema.getMaximumId());
boolean[] expected=new boolean[]{false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false};
boolean[] included=OrcUtils.includeColumns("",schema);
assertEquals(true,Arrays.equals(expected,included));
expected=new boolean[]{false,true,false,false,false,false,false,false,false,true,true,true,true,true,true,false,false,false,false,true,true,true,true,true};
included=OrcUtils.includeColumns("boolean1,string1,middle,map",schema);
assertEquals(true,Arrays.equals(expected,included));
expected=new boolean[]{false,true,false,false,false,false,false,false,false,true,true,true,true,true,true,false,false,false,false,true,true,true,true,true};
included=OrcUtils.includeColumns("boolean1,string1,middle,map",schema);
assertEquals(true,Arrays.equals(expected,included));
expected=new boolean[]{false,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true};
included=OrcUtils.includeColumns("boolean1,byte1,short1,int1,long1,float1,double1,bytes1,string1,middle,list,map",schema);
assertEquals(true,Arrays.equals(expected,included));
ColumnStatistics[] stats=reader.getStatistics();
assertEquals(2,stats[1].getNumberOfValues());
assertEquals(1,((BooleanColumnStatistics)stats[1]).getFalseCount());
assertEquals(1,((BooleanColumnStatistics)stats[1]).getTrueCount());
assertEquals("count: 2 hasNull: false true: 1",stats[1].toString());
assertEquals(2048,((IntegerColumnStatistics)stats[3]).getMaximum());
assertEquals(1024,((IntegerColumnStatistics)stats[3]).getMinimum());
assertEquals(true,((IntegerColumnStatistics)stats[3]).isSumDefined());
assertEquals(3072,((IntegerColumnStatistics)stats[3]).getSum());
assertEquals("count: 2 hasNull: false min: 1024 max: 2048 sum: 3072",stats[3].toString());
StripeStatistics ss=reader.getStripeStatistics().get(0);
assertEquals(2,ss.getColumnStatistics()[0].getNumberOfValues());
assertEquals(1,((BooleanColumnStatistics)ss.getColumnStatistics()[1]).getTrueCount());
assertEquals(1024,((IntegerColumnStatistics)ss.getColumnStatistics()[3]).getMinimum());
assertEquals(2048,((IntegerColumnStatistics)ss.getColumnStatistics()[3]).getMaximum());
assertEquals(3072,((IntegerColumnStatistics)ss.getColumnStatistics()[3]).getSum());
assertEquals(-15.0,((DoubleColumnStatistics)stats[7]).getMinimum());
assertEquals(-5.0,((DoubleColumnStatistics)stats[7]).getMaximum());
assertEquals(-20.0,((DoubleColumnStatistics)stats[7]).getSum(),0.00001);
assertEquals("count: 2 hasNull: false min: -15.0 max: -5.0 sum: -20.0",stats[7].toString());
assertEquals("count: 2 hasNull: false min: bye max: hi sum: 5",stats[9].toString());
StructObjectInspector readerInspector=(StructObjectInspector)reader.getObjectInspector();
assertEquals(ObjectInspector.Category.STRUCT,readerInspector.getCategory());
assertEquals("struct>>,list:array>,"+ "map:map>>",readerInspector.getTypeName());
List extends StructField> fields=readerInspector.getAllStructFieldRefs();
BooleanObjectInspector bo=(BooleanObjectInspector)readerInspector.getStructFieldRef("boolean1").getFieldObjectInspector();
ByteObjectInspector by=(ByteObjectInspector)readerInspector.getStructFieldRef("byte1").getFieldObjectInspector();
ShortObjectInspector sh=(ShortObjectInspector)readerInspector.getStructFieldRef("short1").getFieldObjectInspector();
IntObjectInspector in=(IntObjectInspector)readerInspector.getStructFieldRef("int1").getFieldObjectInspector();
LongObjectInspector lo=(LongObjectInspector)readerInspector.getStructFieldRef("long1").getFieldObjectInspector();
FloatObjectInspector fl=(FloatObjectInspector)readerInspector.getStructFieldRef("float1").getFieldObjectInspector();
DoubleObjectInspector dbl=(DoubleObjectInspector)readerInspector.getStructFieldRef("double1").getFieldObjectInspector();
BinaryObjectInspector bi=(BinaryObjectInspector)readerInspector.getStructFieldRef("bytes1").getFieldObjectInspector();
StringObjectInspector st=(StringObjectInspector)readerInspector.getStructFieldRef("string1").getFieldObjectInspector();
StructObjectInspector mid=(StructObjectInspector)readerInspector.getStructFieldRef("middle").getFieldObjectInspector();
List extends StructField> midFields=mid.getAllStructFieldRefs();
ListObjectInspector midli=(ListObjectInspector)midFields.get(0).getFieldObjectInspector();
StructObjectInspector inner=(StructObjectInspector)midli.getListElementObjectInspector();
List extends StructField> inFields=inner.getAllStructFieldRefs();
ListObjectInspector li=(ListObjectInspector)readerInspector.getStructFieldRef("list").getFieldObjectInspector();
MapObjectInspector ma=(MapObjectInspector)readerInspector.getStructFieldRef("map").getFieldObjectInspector();
StringObjectInspector mk=(StringObjectInspector)ma.getMapKeyObjectInspector();
RecordReader rows=reader.rows();
Object row=rows.next(null);
assertNotNull(row);
assertEquals(false,bo.get(readerInspector.getStructFieldData(row,fields.get(0))));
assertEquals(1,by.get(readerInspector.getStructFieldData(row,fields.get(1))));
assertEquals(1024,sh.get(readerInspector.getStructFieldData(row,fields.get(2))));
assertEquals(65536,in.get(readerInspector.getStructFieldData(row,fields.get(3))));
assertEquals(Long.MAX_VALUE,lo.get(readerInspector.getStructFieldData(row,fields.get(4))));
assertEquals(1.0,fl.get(readerInspector.getStructFieldData(row,fields.get(5))),0.00001);
assertEquals(-15.0,dbl.get(readerInspector.getStructFieldData(row,fields.get(6))),0.00001);
assertEquals(bytes(0,1,2,3,4),bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,fields.get(7))));
assertEquals("hi",st.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(8))));
List> midRow=midli.getList(mid.getStructFieldData(readerInspector.getStructFieldData(row,fields.get(9)),midFields.get(0)));
assertNotNull(midRow);
assertEquals(2,midRow.size());
assertEquals(1,in.get(inner.getStructFieldData(midRow.get(0),inFields.get(0))));
assertEquals("bye",st.getPrimitiveJavaObject(inner.getStructFieldData(midRow.get(0),inFields.get(1))));
assertEquals(2,in.get(inner.getStructFieldData(midRow.get(1),inFields.get(0))));
assertEquals("sigh",st.getPrimitiveJavaObject(inner.getStructFieldData(midRow.get(1),inFields.get(1))));
List> list=li.getList(readerInspector.getStructFieldData(row,fields.get(10)));
assertEquals(2,list.size());
assertEquals(3,in.get(inner.getStructFieldData(list.get(0),inFields.get(0))));
assertEquals("good",st.getPrimitiveJavaObject(inner.getStructFieldData(list.get(0),inFields.get(1))));
assertEquals(4,in.get(inner.getStructFieldData(list.get(1),inFields.get(0))));
assertEquals("bad",st.getPrimitiveJavaObject(inner.getStructFieldData(list.get(1),inFields.get(1))));
Map,?> map=ma.getMap(readerInspector.getStructFieldData(row,fields.get(11)));
assertEquals(0,map.size());
assertEquals(true,rows.hasNext());
row=rows.next(row);
assertEquals(true,bo.get(readerInspector.getStructFieldData(row,fields.get(0))));
assertEquals(100,by.get(readerInspector.getStructFieldData(row,fields.get(1))));
assertEquals(2048,sh.get(readerInspector.getStructFieldData(row,fields.get(2))));
assertEquals(65536,in.get(readerInspector.getStructFieldData(row,fields.get(3))));
assertEquals(Long.MAX_VALUE,lo.get(readerInspector.getStructFieldData(row,fields.get(4))));
assertEquals(2.0,fl.get(readerInspector.getStructFieldData(row,fields.get(5))),0.00001);
assertEquals(-5.0,dbl.get(readerInspector.getStructFieldData(row,fields.get(6))),0.00001);
assertEquals(bytes(),bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,fields.get(7))));
assertEquals("bye",st.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(8))));
midRow=midli.getList(mid.getStructFieldData(readerInspector.getStructFieldData(row,fields.get(9)),midFields.get(0)));
assertNotNull(midRow);
assertEquals(2,midRow.size());
assertEquals(1,in.get(inner.getStructFieldData(midRow.get(0),inFields.get(0))));
assertEquals("bye",st.getPrimitiveJavaObject(inner.getStructFieldData(midRow.get(0),inFields.get(1))));
assertEquals(2,in.get(inner.getStructFieldData(midRow.get(1),inFields.get(0))));
assertEquals("sigh",st.getPrimitiveJavaObject(inner.getStructFieldData(midRow.get(1),inFields.get(1))));
list=li.getList(readerInspector.getStructFieldData(row,fields.get(10)));
assertEquals(3,list.size());
assertEquals(100000000,in.get(inner.getStructFieldData(list.get(0),inFields.get(0))));
assertEquals("cat",st.getPrimitiveJavaObject(inner.getStructFieldData(list.get(0),inFields.get(1))));
assertEquals(-100000,in.get(inner.getStructFieldData(list.get(1),inFields.get(0))));
assertEquals("in",st.getPrimitiveJavaObject(inner.getStructFieldData(list.get(1),inFields.get(1))));
assertEquals(1234,in.get(inner.getStructFieldData(list.get(2),inFields.get(0))));
assertEquals("hat",st.getPrimitiveJavaObject(inner.getStructFieldData(list.get(2),inFields.get(1))));
map=ma.getMap(readerInspector.getStructFieldData(row,fields.get(11)));
assertEquals(2,map.size());
boolean[] found=new boolean[2];
for ( Object key : map.keySet()) {
String str=mk.getPrimitiveJavaObject(key);
if (str.equals("chani")) {
assertEquals(false,found[0]);
assertEquals(5,in.get(inner.getStructFieldData(map.get(key),inFields.get(0))));
assertEquals(str,st.getPrimitiveJavaObject(inner.getStructFieldData(map.get(key),inFields.get(1))));
found[0]=true;
}
else if (str.equals("mauddib")) {
assertEquals(false,found[1]);
assertEquals(1,in.get(inner.getStructFieldData(map.get(key),inFields.get(0))));
assertEquals(str,st.getPrimitiveJavaObject(inner.getStructFieldData(map.get(key),inFields.get(1))));
found[1]=true;
}
else {
throw new IllegalArgumentException("Unknown key " + str);
}
}
assertEquals(true,found[0]);
assertEquals(true,found[1]);
assertEquals(false,rows.hasNext());
rows.close();
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes a SNAPPY file with rowIndexStride(0), i.e. with row indexes
 * disabled, and verifies the stripes carry no index data while all 50,000
 * rows (10,000 distinct values x 5 repeats each) still read back correctly.
 * (The original Javadoc here was a copy-paste of testSnappy's.)
 * @throws Exception
 */
@Test public void testWithoutIndex() throws Exception {
ObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(InnerStruct.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
// rowIndexStride(0) turns off row-index creation entirely.
Writer writer=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).inspector(inspector).stripeSize(5000).compress(CompressionKind.SNAPPY).bufferSize(1000).rowIndexStride(0));
Random rand=new Random(24);
// Each generated row is written five times in a row.
for (int i=0; i < 10000; ++i) {
InnerStruct row=new InnerStruct(rand.nextInt(),Integer.toBinaryString(rand.nextInt()));
for (int j=0; j < 5; ++j) {
writer.addRow(row);
}
}
writer.close();
Reader reader=OrcFile.createReader(testFilePath,OrcFile.readerOptions(conf).filesystem(fs));
assertEquals(50000,reader.getNumberOfRows());
assertEquals(0,reader.getRowIndexStride());
StripeInformation stripe=reader.getStripes().iterator().next();
// The first stripe has data but, crucially, a zero-length index section.
assertEquals(true,stripe.getDataLength() != 0);
assertEquals(0,stripe.getIndexLength());
RecordReader rows=reader.rows();
// Replay the same seed to regenerate the expected values.
rand=new Random(24);
OrcStruct row=null;
for (int i=0; i < 10000; ++i) {
int intVal=rand.nextInt();
String strVal=Integer.toBinaryString(rand.nextInt());
for (int j=0; j < 5; ++j) {
assertEquals(true,rows.hasNext());
row=(OrcStruct)rows.next(row);
assertEquals(intVal,((IntWritable)row.getFieldValue(0)).get());
assertEquals(strVal,row.getFieldValue(1).toString());
}
}
assertEquals(false,rows.hasNext());
rows.close();
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Verifies per-stripe column statistics and row-group index entries for a
 * file with three stripes of known content: 5000 (1,"one"), 5000 (2,"two"),
 * and 1000 (3,"three") rows.
 * @throws Exception
 */
@Test public void testStripeLevelStats() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(InnerStruct.class,
        ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .bufferSize(10000).batchSize(1000));
  // Rows 0-4999 -> (1,"one"); 5000-9999 -> (2,"two"); 10000-10999 -> (3,"three").
  for (int i = 0; i < 11000; i++) {
    if (i >= 5000) {
      if (i >= 10000) {
        writer.addRow(new InnerStruct(3, "three"));
      } else {
        writer.addRow(new InnerStruct(2, "two"));
      }
    } else {
      writer.addRow(new InnerStruct(1, "one"));
    }
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  TypeDescription schema = writer.getSchema();
  assertEquals(2, schema.getMaximumId());
  boolean[] expected = new boolean[]{false, true, false};
  boolean[] included = OrcUtils.includeColumns("int1", schema);
  assertEquals(true, Arrays.equals(expected, included));
  // List<StripeStatistics>: restored element type (it had been stripped
  // from the text; a raw List would not compile on the get() assignments).
  List<StripeStatistics> stats = reader.getStripeStatistics();
  int numStripes = stats.size();
  assertEquals(3, numStripes);
  StripeStatistics ss1 = stats.get(0);
  StripeStatistics ss2 = stats.get(1);
  StripeStatistics ss3 = stats.get(2);
  // Column 0 (root) value counts per stripe.
  assertEquals(5000, ss1.getColumnStatistics()[0].getNumberOfValues());
  assertEquals(5000, ss2.getColumnStatistics()[0].getNumberOfValues());
  assertEquals(1000, ss3.getColumnStatistics()[0].getNumberOfValues());
  // Column 1 (int1): counts, min/max, and sums per stripe.
  assertEquals(5000, (ss1.getColumnStatistics()[1]).getNumberOfValues());
  assertEquals(5000, (ss2.getColumnStatistics()[1]).getNumberOfValues());
  assertEquals(1000, (ss3.getColumnStatistics()[1]).getNumberOfValues());
  assertEquals(1, ((IntegerColumnStatistics) ss1.getColumnStatistics()[1]).getMinimum());
  assertEquals(2, ((IntegerColumnStatistics) ss2.getColumnStatistics()[1]).getMinimum());
  assertEquals(3, ((IntegerColumnStatistics) ss3.getColumnStatistics()[1]).getMinimum());
  assertEquals(1, ((IntegerColumnStatistics) ss1.getColumnStatistics()[1]).getMaximum());
  assertEquals(2, ((IntegerColumnStatistics) ss2.getColumnStatistics()[1]).getMaximum());
  assertEquals(3, ((IntegerColumnStatistics) ss3.getColumnStatistics()[1]).getMaximum());
  assertEquals(5000, ((IntegerColumnStatistics) ss1.getColumnStatistics()[1]).getSum());
  assertEquals(10000, ((IntegerColumnStatistics) ss2.getColumnStatistics()[1]).getSum());
  assertEquals(3000, ((IntegerColumnStatistics) ss3.getColumnStatistics()[1]).getSum());
  // Column 2 (string1): counts, min/max, and total string lengths.
  assertEquals(5000, (ss1.getColumnStatistics()[2]).getNumberOfValues());
  assertEquals(5000, (ss2.getColumnStatistics()[2]).getNumberOfValues());
  assertEquals(1000, (ss3.getColumnStatistics()[2]).getNumberOfValues());
  assertEquals("one", ((StringColumnStatistics) ss1.getColumnStatistics()[2]).getMinimum());
  assertEquals("two", ((StringColumnStatistics) ss2.getColumnStatistics()[2]).getMinimum());
  assertEquals("three", ((StringColumnStatistics) ss3.getColumnStatistics()[2]).getMinimum());
  assertEquals("one", ((StringColumnStatistics) ss1.getColumnStatistics()[2]).getMaximum());
  assertEquals("two", ((StringColumnStatistics) ss2.getColumnStatistics()[2]).getMaximum());
  assertEquals("three", ((StringColumnStatistics) ss3.getColumnStatistics()[2]).getMaximum());
  assertEquals(15000, ((StringColumnStatistics) ss1.getColumnStatistics()[2]).getSum());
  assertEquals(15000, ((StringColumnStatistics) ss2.getColumnStatistics()[2]).getSum());
  assertEquals(5000, ((StringColumnStatistics) ss3.getColumnStatistics()[2]).getSum());
  // Row-group index of stripe 0: one entry (batchSize 1000), zeroed
  // positions, and stats matching the stripe's constant value 1.
  RecordReaderImpl recordReader = (RecordReaderImpl) reader.rows();
  OrcProto.RowIndex[] index = recordReader.readRowIndex(0, null, null).getRowGroupIndex();
  assertEquals(3, index.length);
  // List<OrcProto.RowIndexEntry>: restored element type (stripped as above).
  List<OrcProto.RowIndexEntry> items = index[1].getEntryList();
  assertEquals(1, items.size());
  assertEquals(3, items.get(0).getPositionsCount());
  assertEquals(0, items.get(0).getPositions(0));
  assertEquals(0, items.get(0).getPositions(1));
  assertEquals(0, items.get(0).getPositions(2));
  assertEquals(1, items.get(0).getStatistics().getIntStatistics().getMinimum());
  // Stripe 1's index reflects its constant value 2.
  index = recordReader.readRowIndex(1, null, null).getRowGroupIndex();
  assertEquals(3, index.length);
  items = index[1].getEntryList();
  assertEquals(2, items.get(0).getStatistics().getIntStatistics().getMaximum());
}
APIUtilityVerifier IterativeVerifier BranchVerifier InternalCallVerifier EqualityVerifier
/**
 * Exercises union, timestamp, and decimal columns together in one file.
 * The object inspector is built manually from an OrcProto type list because
 * the Hive reflection-based inspector doesn't handle these types properly.
 */
@Test public void testUnionAndTimestamp() throws Exception {
// Hand-build the schema: struct(time:timestamp, union:uniontype(int,string), decimal).
// NOTE(review): presumably List<OrcProto.Type> -- generic type parameters look
// stripped from this copy of the file; confirm against upstream.
List types=new ArrayList();
types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.STRUCT).addFieldNames("time").addFieldNames("union").addFieldNames("decimal").addSubtypes(1).addSubtypes(2).addSubtypes(5).build());
types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.TIMESTAMP).build());
types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.UNION).addSubtypes(3).addSubtypes(4).build());
types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.INT).build());
types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.STRING).build());
types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.DECIMAL).build());
ObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=OrcStruct.createObjectInspector(0,types);
}
// Seed with a large value so the tracked maximum is meaningful from the start.
HiveDecimal maxValue=HiveDecimal.create("10000000000000000000");
// Tiny stripe/buffer sizes with no compression so the file spans multiple stripes.
Writer writer=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).inspector(inspector).stripeSize(1000).compress(CompressionKind.NONE).batchSize(1000).bufferSize(100).blockPadding(false));
// The same row/union objects are reused and mutated between addRow calls.
OrcStruct row=new OrcStruct(3);
OrcUnion union=new OrcUnion();
row.setFieldValue(1,union);
// Rows 1-2: concrete values using each union branch in turn.
row.setFieldValue(0,new TimestampWritable(Timestamp.valueOf("2000-03-12 15:00:00")));
HiveDecimal value=HiveDecimal.create("12345678.6547456");
row.setFieldValue(2,new HiveDecimalWritable(value));
union.set((byte)0,new IntWritable(42));
writer.addRow(row);
row.setFieldValue(0,new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")));
union.set((byte)1,new Text("hello"));
value=HiveDecimal.create("-5643.234");
row.setFieldValue(2,new HiveDecimalWritable(value));
writer.addRow(row);
// Row 3: all fields null.
row.setFieldValue(0,null);
row.setFieldValue(1,null);
row.setFieldValue(2,null);
writer.addRow(row);
// Rows 4-5: union present but holding a null value in each branch.
row.setFieldValue(1,union);
union.set((byte)0,null);
writer.addRow(row);
union.set((byte)1,null);
writer.addRow(row);
// Row 6: extreme values -- epoch timestamp and the maximum decimal.
union.set((byte)0,new IntWritable(200000));
row.setFieldValue(0,new TimestampWritable(Timestamp.valueOf("1970-01-01 00:00:00")));
value=HiveDecimal.create("10000000000000000000");
row.setFieldValue(2,new HiveDecimalWritable(value));
writer.addRow(row);
// One row per "year" 1970..2037: alternate union branches and write random
// decimals, tracking the running maximum for the statistics check below.
Random rand=new Random(42);
for (int i=1970; i < 2038; ++i) {
row.setFieldValue(0,new TimestampWritable(Timestamp.valueOf(i + "-05-05 12:34:56." + i)));
if ((i & 1) == 0) {
union.set((byte)0,new IntWritable(i * i));
}
else {
union.set((byte)1,new Text(Integer.toString(i * i)));
}
value=HiveDecimal.create(new BigInteger(64,rand),rand.nextInt(18));
row.setFieldValue(2,new HiveDecimalWritable(value));
if (maxValue.compareTo(value) < 0) {
maxValue=value;
}
writer.addRow(row);
}
// 5000 identical rows with null time/decimal, then three distinct union ints.
row.setFieldValue(0,null);
union.set((byte)0,new IntWritable(1732050807));
row.setFieldValue(2,null);
for (int i=0; i < 5000; ++i) {
writer.addRow(row);
}
union.set((byte)0,new IntWritable(0));
writer.addRow(row);
union.set((byte)0,new IntWritable(10));
writer.addRow(row);
union.set((byte)0,new IntWritable(138));
writer.addRow(row);
writer.close();
// Re-open the file and verify schema, column-projection masks, decimal
// statistics, and that the stripes tile the content contiguously.
Reader reader=OrcFile.createReader(testFilePath,OrcFile.readerOptions(conf).filesystem(fs));
TypeDescription schema=writer.getSchema();
assertEquals(5,schema.getMaximumId());
boolean[] expected=new boolean[]{false,false,false,false,false,false};
boolean[] included=OrcUtils.includeColumns("",schema);
assertEquals(true,Arrays.equals(expected,included));
expected=new boolean[]{false,true,false,false,false,true};
included=OrcUtils.includeColumns("time,decimal",schema);
assertEquals(true,Arrays.equals(expected,included));
// Including "union" pulls in its child columns (int and string) too.
expected=new boolean[]{false,false,true,true,true,false};
included=OrcUtils.includeColumns("union",schema);
assertEquals(true,Arrays.equals(expected,included));
assertEquals(false,reader.getMetadataKeys().iterator().hasNext());
assertEquals(5077,reader.getNumberOfRows());
DecimalColumnStatistics stats=(DecimalColumnStatistics)reader.getStatistics()[5];
assertEquals(71,stats.getNumberOfValues());
assertEquals(HiveDecimal.create("-5643.234"),stats.getMinimum());
assertEquals(maxValue,stats.getMaximum());
int stripeCount=0;
int rowCount=0;
long currentOffset=-1;
for ( StripeInformation stripe : reader.getStripes()) {
stripeCount+=1;
rowCount+=stripe.getNumberOfRows();
if (currentOffset < 0) {
currentOffset=stripe.getOffset() + stripe.getLength();
}
else {
// Each stripe must begin exactly where the previous one ended.
assertEquals(currentOffset,stripe.getOffset());
currentOffset+=stripe.getLength();
}
}
assertEquals(reader.getNumberOfRows(),rowCount);
assertEquals(2,stripeCount);
assertEquals(reader.getContentLength(),currentOffset);
// Read every row back in write order and compare against what was written.
RecordReader rows=reader.rows();
assertEquals(0,rows.getRowNumber());
assertEquals(0.0,rows.getProgress(),0.000001);
assertEquals(true,rows.hasNext());
row=(OrcStruct)rows.next(null);
assertEquals(1,rows.getRowNumber());
inspector=reader.getObjectInspector();
// NOTE(review): this type-name literal looks truncated (generics/angle brackets
// stripped in this copy) -- verify the expected string against upstream.
assertEquals("struct,decimal:decimal(38,18)>",inspector.getTypeName());
assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-12 15:00:00")),row.getFieldValue(0));
union=(OrcUnion)row.getFieldValue(1);
assertEquals(0,union.getTag());
assertEquals(new IntWritable(42),union.getObject());
assertEquals(new HiveDecimalWritable(HiveDecimal.create("12345678.6547456")),row.getFieldValue(2));
row=(OrcStruct)rows.next(row);
assertEquals(2,rows.getRowNumber());
assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),row.getFieldValue(0));
assertEquals(1,union.getTag());
assertEquals(new Text("hello"),union.getObject());
assertEquals(new HiveDecimalWritable(HiveDecimal.create("-5643.234")),row.getFieldValue(2));
row=(OrcStruct)rows.next(row);
assertEquals(null,row.getFieldValue(0));
assertEquals(null,row.getFieldValue(1));
assertEquals(null,row.getFieldValue(2));
row=(OrcStruct)rows.next(row);
assertEquals(null,row.getFieldValue(0));
union=(OrcUnion)row.getFieldValue(1);
assertEquals(0,union.getTag());
assertEquals(null,union.getObject());
assertEquals(null,row.getFieldValue(2));
row=(OrcStruct)rows.next(row);
assertEquals(null,row.getFieldValue(0));
assertEquals(1,union.getTag());
assertEquals(null,union.getObject());
assertEquals(null,row.getFieldValue(2));
row=(OrcStruct)rows.next(row);
assertEquals(new TimestampWritable(Timestamp.valueOf("1970-01-01 00:00:00")),row.getFieldValue(0));
assertEquals(new IntWritable(200000),union.getObject());
assertEquals(new HiveDecimalWritable(HiveDecimal.create("10000000000000000000")),row.getFieldValue(2));
// Re-seed with the same value used while writing so the random decimals match.
rand=new Random(42);
for (int i=1970; i < 2038; ++i) {
row=(OrcStruct)rows.next(row);
assertEquals(new TimestampWritable(Timestamp.valueOf(i + "-05-05 12:34:56." + i)),row.getFieldValue(0));
if ((i & 1) == 0) {
assertEquals(0,union.getTag());
assertEquals(new IntWritable(i * i),union.getObject());
}
else {
assertEquals(1,union.getTag());
assertEquals(new Text(Integer.toString(i * i)),union.getObject());
}
assertEquals(new HiveDecimalWritable(HiveDecimal.create(new BigInteger(64,rand),rand.nextInt(18))),row.getFieldValue(2));
}
for (int i=0; i < 5000; ++i) {
row=(OrcStruct)rows.next(row);
assertEquals(new IntWritable(1732050807),union.getObject());
}
row=(OrcStruct)rows.next(row);
assertEquals(new IntWritable(0),union.getObject());
row=(OrcStruct)rows.next(row);
assertEquals(new IntWritable(10),union.getObject());
row=(OrcStruct)rows.next(row);
assertEquals(new IntWritable(138),union.getObject());
assertEquals(false,rows.hasNext());
assertEquals(1.0,rows.getProgress(),0.00001);
assertEquals(reader.getNumberOfRows(),rows.getRowNumber());
// Seek back to row index 1 (the second row) and re-verify its contents.
rows.seekToRow(1);
row=(OrcStruct)rows.next(row);
assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),row.getFieldValue(0));
assertEquals(1,union.getTag());
assertEquals(new Text("hello"),union.getObject());
assertEquals(new HiveDecimalWritable(HiveDecimal.create("-5643.234")),row.getFieldValue(2));
rows.close();
}
APIUtilityVerifier BranchVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Reads a pre-generated ORC file in the 0.11 format from the classpath and
 * verifies stripe layout, column statistics, the inspector hierarchy, and
 * the contents of the first and last (7500th) row.
 */
@Test public void testReadFormat_0_11() throws Exception {
Path oldFilePath=new Path(HiveTestUtils.getFileFromClasspath("orc-file-11-format.orc"));
Reader reader=OrcFile.createReader(oldFilePath,OrcFile.readerOptions(conf).filesystem(fs));
// Stripes must tile the file contiguously: each one starts where the
// previous stripe's index+data+footer ended.
int stripeCount=0;
int rowCount=0;
long currentOffset=-1;
for ( StripeInformation stripe : reader.getStripes()) {
stripeCount+=1;
rowCount+=stripe.getNumberOfRows();
if (currentOffset < 0) {
currentOffset=stripe.getOffset() + stripe.getIndexLength() + stripe.getDataLength()+ stripe.getFooterLength();
}
else {
assertEquals(currentOffset,stripe.getOffset());
currentOffset+=stripe.getIndexLength() + stripe.getDataLength() + stripe.getFooterLength();
}
}
assertEquals(reader.getNumberOfRows(),rowCount);
assertEquals(2,stripeCount);
// Spot-check per-column file statistics (boolean, int, long, double, string).
ColumnStatistics[] stats=reader.getStatistics();
assertEquals(7500,stats[1].getNumberOfValues());
assertEquals(3750,((BooleanColumnStatistics)stats[1]).getFalseCount());
assertEquals(3750,((BooleanColumnStatistics)stats[1]).getTrueCount());
assertEquals("count: 7500 hasNull: true true: 3750",stats[1].toString());
assertEquals(2048,((IntegerColumnStatistics)stats[3]).getMaximum());
assertEquals(1024,((IntegerColumnStatistics)stats[3]).getMinimum());
assertEquals(true,((IntegerColumnStatistics)stats[3]).isSumDefined());
assertEquals(11520000,((IntegerColumnStatistics)stats[3]).getSum());
assertEquals("count: 7500 hasNull: true min: 1024 max: 2048 sum: 11520000",stats[3].toString());
assertEquals(Long.MAX_VALUE,((IntegerColumnStatistics)stats[5]).getMaximum());
assertEquals(Long.MAX_VALUE,((IntegerColumnStatistics)stats[5]).getMinimum());
// Summing Long.MAX_VALUE 7500 times overflows, so the sum is undefined.
assertEquals(false,((IntegerColumnStatistics)stats[5]).isSumDefined());
assertEquals("count: 7500 hasNull: true min: 9223372036854775807 max: 9223372036854775807",stats[5].toString());
assertEquals(-15.0,((DoubleColumnStatistics)stats[7]).getMinimum());
assertEquals(-5.0,((DoubleColumnStatistics)stats[7]).getMaximum());
assertEquals(-75000.0,((DoubleColumnStatistics)stats[7]).getSum(),0.00001);
assertEquals("count: 7500 hasNull: true min: -15.0 max: -5.0 sum: -75000.0",stats[7].toString());
assertEquals("count: 7500 hasNull: true min: bye max: hi sum: 0",stats[9].toString());
// Fetch one inspector per field so individual values can be unpacked below.
// NOTE(review): the expected type-name literal and the "List extends ..."
// declarations below look mangled (generics stripped in this copy of the
// file) -- verify against upstream before editing.
StructObjectInspector readerInspector=(StructObjectInspector)reader.getObjectInspector();
assertEquals(ObjectInspector.Category.STRUCT,readerInspector.getCategory());
assertEquals("struct>>,list:array>,"+ "map:map>,ts:timestamp,"+ "decimal1:decimal(38,18)>",readerInspector.getTypeName());
List extends StructField> fields=readerInspector.getAllStructFieldRefs();
BooleanObjectInspector bo=(BooleanObjectInspector)readerInspector.getStructFieldRef("boolean1").getFieldObjectInspector();
ByteObjectInspector by=(ByteObjectInspector)readerInspector.getStructFieldRef("byte1").getFieldObjectInspector();
ShortObjectInspector sh=(ShortObjectInspector)readerInspector.getStructFieldRef("short1").getFieldObjectInspector();
IntObjectInspector in=(IntObjectInspector)readerInspector.getStructFieldRef("int1").getFieldObjectInspector();
LongObjectInspector lo=(LongObjectInspector)readerInspector.getStructFieldRef("long1").getFieldObjectInspector();
FloatObjectInspector fl=(FloatObjectInspector)readerInspector.getStructFieldRef("float1").getFieldObjectInspector();
DoubleObjectInspector dbl=(DoubleObjectInspector)readerInspector.getStructFieldRef("double1").getFieldObjectInspector();
BinaryObjectInspector bi=(BinaryObjectInspector)readerInspector.getStructFieldRef("bytes1").getFieldObjectInspector();
StringObjectInspector st=(StringObjectInspector)readerInspector.getStructFieldRef("string1").getFieldObjectInspector();
StructObjectInspector mid=(StructObjectInspector)readerInspector.getStructFieldRef("middle").getFieldObjectInspector();
List extends StructField> midFields=mid.getAllStructFieldRefs();
ListObjectInspector midli=(ListObjectInspector)midFields.get(0).getFieldObjectInspector();
StructObjectInspector inner=(StructObjectInspector)midli.getListElementObjectInspector();
List extends StructField> inFields=inner.getAllStructFieldRefs();
ListObjectInspector li=(ListObjectInspector)readerInspector.getStructFieldRef("list").getFieldObjectInspector();
MapObjectInspector ma=(MapObjectInspector)readerInspector.getStructFieldRef("map").getFieldObjectInspector();
TimestampObjectInspector tso=(TimestampObjectInspector)readerInspector.getStructFieldRef("ts").getFieldObjectInspector();
HiveDecimalObjectInspector dco=(HiveDecimalObjectInspector)readerInspector.getStructFieldRef("decimal1").getFieldObjectInspector();
StringObjectInspector mk=(StringObjectInspector)ma.getMapKeyObjectInspector();
// Verify the first row field by field.
RecordReader rows=reader.rows();
Object row=rows.next(null);
assertNotNull(row);
assertEquals(false,bo.get(readerInspector.getStructFieldData(row,fields.get(0))));
assertEquals(1,by.get(readerInspector.getStructFieldData(row,fields.get(1))));
assertEquals(1024,sh.get(readerInspector.getStructFieldData(row,fields.get(2))));
assertEquals(65536,in.get(readerInspector.getStructFieldData(row,fields.get(3))));
assertEquals(Long.MAX_VALUE,lo.get(readerInspector.getStructFieldData(row,fields.get(4))));
assertEquals(1.0,fl.get(readerInspector.getStructFieldData(row,fields.get(5))),0.00001);
assertEquals(-15.0,dbl.get(readerInspector.getStructFieldData(row,fields.get(6))),0.00001);
assertEquals(bytes(0,1,2,3,4),bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,fields.get(7))));
assertEquals("hi",st.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(8))));
List> midRow=midli.getList(mid.getStructFieldData(readerInspector.getStructFieldData(row,fields.get(9)),midFields.get(0)));
assertNotNull(midRow);
assertEquals(2,midRow.size());
assertEquals(1,in.get(inner.getStructFieldData(midRow.get(0),inFields.get(0))));
assertEquals("bye",st.getPrimitiveJavaObject(inner.getStructFieldData(midRow.get(0),inFields.get(1))));
assertEquals(2,in.get(inner.getStructFieldData(midRow.get(1),inFields.get(0))));
assertEquals("sigh",st.getPrimitiveJavaObject(inner.getStructFieldData(midRow.get(1),inFields.get(1))));
List> list=li.getList(readerInspector.getStructFieldData(row,fields.get(10)));
assertEquals(2,list.size());
assertEquals(3,in.get(inner.getStructFieldData(list.get(0),inFields.get(0))));
assertEquals("good",st.getPrimitiveJavaObject(inner.getStructFieldData(list.get(0),inFields.get(1))));
assertEquals(4,in.get(inner.getStructFieldData(list.get(1),inFields.get(0))));
assertEquals("bad",st.getPrimitiveJavaObject(inner.getStructFieldData(list.get(1),inFields.get(1))));
Map,?> map=ma.getMap(readerInspector.getStructFieldData(row,fields.get(11)));
assertEquals(0,map.size());
assertEquals(Timestamp.valueOf("2000-03-12 15:00:00"),tso.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(12))));
assertEquals(HiveDecimal.create("12345678.6547456"),dco.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(13))));
assertEquals(true,rows.hasNext());
// Seek to the last row (index 7499) and verify it field by field.
rows.seekToRow(7499);
row=rows.next(null);
assertEquals(true,bo.get(readerInspector.getStructFieldData(row,fields.get(0))));
assertEquals(100,by.get(readerInspector.getStructFieldData(row,fields.get(1))));
assertEquals(2048,sh.get(readerInspector.getStructFieldData(row,fields.get(2))));
assertEquals(65536,in.get(readerInspector.getStructFieldData(row,fields.get(3))));
assertEquals(Long.MAX_VALUE,lo.get(readerInspector.getStructFieldData(row,fields.get(4))));
assertEquals(2.0,fl.get(readerInspector.getStructFieldData(row,fields.get(5))),0.00001);
assertEquals(-5.0,dbl.get(readerInspector.getStructFieldData(row,fields.get(6))),0.00001);
assertEquals(bytes(),bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,fields.get(7))));
assertEquals("bye",st.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(8))));
midRow=midli.getList(mid.getStructFieldData(readerInspector.getStructFieldData(row,fields.get(9)),midFields.get(0)));
assertNotNull(midRow);
assertEquals(2,midRow.size());
assertEquals(1,in.get(inner.getStructFieldData(midRow.get(0),inFields.get(0))));
assertEquals("bye",st.getPrimitiveJavaObject(inner.getStructFieldData(midRow.get(0),inFields.get(1))));
assertEquals(2,in.get(inner.getStructFieldData(midRow.get(1),inFields.get(0))));
assertEquals("sigh",st.getPrimitiveJavaObject(inner.getStructFieldData(midRow.get(1),inFields.get(1))));
list=li.getList(readerInspector.getStructFieldData(row,fields.get(10)));
assertEquals(3,list.size());
assertEquals(100000000,in.get(inner.getStructFieldData(list.get(0),inFields.get(0))));
assertEquals("cat",st.getPrimitiveJavaObject(inner.getStructFieldData(list.get(0),inFields.get(1))));
assertEquals(-100000,in.get(inner.getStructFieldData(list.get(1),inFields.get(0))));
assertEquals("in",st.getPrimitiveJavaObject(inner.getStructFieldData(list.get(1),inFields.get(1))));
assertEquals(1234,in.get(inner.getStructFieldData(list.get(2),inFields.get(0))));
assertEquals("hat",st.getPrimitiveJavaObject(inner.getStructFieldData(list.get(2),inFields.get(1))));
// The map is unordered, so track which of the two expected keys were seen.
map=ma.getMap(readerInspector.getStructFieldData(row,fields.get(11)));
assertEquals(2,map.size());
boolean[] found=new boolean[2];
for ( Object key : map.keySet()) {
String str=mk.getPrimitiveJavaObject(key);
if (str.equals("chani")) {
assertEquals(false,found[0]);
assertEquals(5,in.get(inner.getStructFieldData(map.get(key),inFields.get(0))));
assertEquals(str,st.getPrimitiveJavaObject(inner.getStructFieldData(map.get(key),inFields.get(1))));
found[0]=true;
}
else if (str.equals("mauddib")) {
assertEquals(false,found[1]);
assertEquals(1,in.get(inner.getStructFieldData(map.get(key),inFields.get(0))));
assertEquals(str,st.getPrimitiveJavaObject(inner.getStructFieldData(map.get(key),inFields.get(1))));
found[1]=true;
}
else {
throw new IllegalArgumentException("Unknown key " + str);
}
}
assertEquals(true,found[0]);
assertEquals(true,found[1]);
assertEquals(Timestamp.valueOf("2000-03-12 15:00:01"),tso.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(12))));
assertEquals(HiveDecimal.create("12345678.6547457"),dco.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(13))));
assertEquals(false,rows.hasNext());
rows.close();
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies that a custom memory manager is registered with the writer on
 * creation, unregistered on close, and that the tiny pool it reports keeps
 * every stripe small for a V_0_11 file.
 */
@Test public void testMemoryManagementV11() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        InnerStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  // Memory manager with a small pool so the writer flushes stripes early.
  MyMemoryManager mgr = new MyMemoryManager(conf, 10000, 0.1);
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf)
          .inspector(inspector)
          .compress(CompressionKind.NONE)
          .stripeSize(50000)
          .bufferSize(100)
          .rowIndexStride(0)
          .memory(mgr)
          .batchSize(100)
          .version(OrcFile.Version.V_0_11));
  // Creating the writer registers it with the memory manager.
  assertEquals(testFilePath, mgr.path);
  for (int rowIx = 0; rowIx < 2500; ++rowIx) {
    writer.addRow(new InnerStruct(rowIx * 300, Integer.toHexString(10 * rowIx)));
  }
  writer.close();
  // Closing the writer unregisters it again.
  assertEquals(null, mgr.path);
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  int stripeNo = 0;
  for (StripeInformation stripe : reader.getStripes()) {
    stripeNo += 1;
    assertTrue("stripe " + stripeNo + " is too long at " + stripe.getDataLength(),
        stripe.getDataLength() < 5000);
  }
  assertEquals(25, stripeNo);
  assertEquals(2500, reader.getNumberOfRows());
}
APIUtilityVerifier IterativeVerifier BranchVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes 21000 random (int, string) rows, verifies the recorded column
 * statistics and type tree, then reads the file back through two readers
 * that each project a single column and checks the values per row.
 */
@Test public void columnProjection() throws Exception {
ObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(InnerStruct.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
Writer writer=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).inspector(inspector).stripeSize(1000).compress(CompressionKind.NONE).bufferSize(100).rowIndexStride(1000));
// Two independent seeded generators, one per column, so the read-back loop
// can regenerate the exact same sequences with fresh Random(1)/Random(2).
Random r1=new Random(1);
Random r2=new Random(2);
int x;
int minInt=0, maxInt=0;
String y;
String minStr=null, maxStr=null;
for (int i=0; i < 21000; ++i) {
x=r1.nextInt();
y=Long.toHexString(r2.nextLong());
// Track expected min/max for both columns; i == 0 seeds the extremes.
if (i == 0 || x < minInt) {
minInt=x;
}
if (i == 0 || x > maxInt) {
maxInt=x;
}
if (i == 0 || y.compareTo(minStr) < 0) {
minStr=y;
}
if (i == 0 || y.compareTo(maxStr) > 0) {
maxStr=y;
}
writer.addRow(inner(x,y));
}
writer.close();
// The file statistics must match the extremes tracked while writing.
Reader reader=OrcFile.createReader(testFilePath,OrcFile.readerOptions(conf).filesystem(fs));
ColumnStatistics[] stats=reader.getStatistics();
assertEquals(3,stats.length);
for ( ColumnStatistics s : stats) {
assertEquals(21000,s.getNumberOfValues());
if (s instanceof IntegerColumnStatistics) {
assertEquals(minInt,((IntegerColumnStatistics)s).getMinimum());
assertEquals(maxInt,((IntegerColumnStatistics)s).getMaximum());
}
else if (s instanceof StringColumnStatistics) {
assertEquals(maxStr,((StringColumnStatistics)s).getMaximum());
assertEquals(minStr,((StringColumnStatistics)s).getMinimum());
}
}
// Type tree: column 0 is the root struct with subtypes int (1) and string (2).
// NOTE(review): presumably List<OrcProto.Type>; generics look stripped here.
List types=reader.getTypes();
assertEquals(3,types.size());
assertEquals(OrcProto.Type.Kind.STRUCT,types.get(0).getKind());
assertEquals(2,types.get(0).getSubtypesCount());
assertEquals(1,types.get(0).getSubtypes(0));
assertEquals(2,types.get(0).getSubtypes(1));
assertEquals(OrcProto.Type.Kind.INT,types.get(1).getKind());
assertEquals(0,types.get(1).getSubtypesCount());
assertEquals(OrcProto.Type.Kind.STRING,types.get(2).getKind());
assertEquals(0,types.get(2).getSubtypesCount());
// rows1 projects only the int column, rows2 only the string column.
RecordReader rows1=reader.rows(new boolean[]{true,true,false});
RecordReader rows2=reader.rows(new boolean[]{true,false,true});
r1=new Random(1);
r2=new Random(2);
OrcStruct row1=null;
OrcStruct row2=null;
for (int i=0; i < 21000; ++i) {
assertEquals(true,rows1.hasNext());
assertEquals(true,rows2.hasNext());
row1=(OrcStruct)rows1.next(row1);
row2=(OrcStruct)rows2.next(row2);
assertEquals(r1.nextInt(),((IntWritable)row1.getFieldValue(0)).get());
assertEquals(Long.toHexString(r2.nextLong()),row2.getFieldValue(1).toString());
}
assertEquals(false,rows1.hasNext());
assertEquals(false,rows2.hasNext());
rows1.close();
rows2.close();
}
Class: org.apache.hadoop.hive.ql.io.orc.TestOrcFileStripeMergeRecordReader EqualityVerifier
/**
 * A split whose offset lands mid-file should still let the stripe-merge
 * reader surface the stripe that follows the offset.
 */
@Test public void testSplitStartsWithOffset() throws IOException {
  // NOTE(review): presumably writes enough rows to span a stripe boundary --
  // confirm against createOrcFile's definition.
  createOrcFile(DEFAULT_STRIPE_SIZE, DEFAULT_STRIPE_SIZE + 1);
  long fileLen = fs.getFileStatus(tmpPath).getLen();
  long midPoint = fileLen / 2;
  FileSplit split = new FileSplit(tmpPath, midPoint, fileLen, (String[]) null);
  OrcFileStripeMergeRecordReader stripeReader =
      new OrcFileStripeMergeRecordReader(conf, split);
  stripeReader.next(key, value);
  Assert.assertEquals("InputPath", tmpPath, key.getInputPath());
  Assert.assertEquals("NumberOfValues", 1L,
      value.getStripeStatistics().getColStats(0).getNumberOfValues());
  stripeReader.close();
}
Class: org.apache.hadoop.hive.ql.io.orc.TestOrcNullOptimization APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Writes eight rows where the int and string columns each contain one null,
 * then verifies the column statistics reflect the nulls, that only the last
 * stripe carries a PRESENT stream, and that the null fields read back as null.
 */
@Test public void testColumnsWithNullAndCompression() throws Exception {
ObjectInspector inspector;
synchronized (TestOrcNullOptimization.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(MyStruct.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
Writer writer=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000).bufferSize(10000));
writer.addRow(new MyStruct(3,"a",true,Lists.newArrayList(new InnerStruct(100))));
// Row 2 has a null int, row 3 a null string -- the only nulls in the file.
writer.addRow(new MyStruct(null,"b",true,Lists.newArrayList(new InnerStruct(100))));
writer.addRow(new MyStruct(3,null,false,Lists.newArrayList(new InnerStruct(100))));
writer.addRow(new MyStruct(3,"d",true,Lists.newArrayList(new InnerStruct(100))));
writer.addRow(new MyStruct(2,"e",true,Lists.newArrayList(new InnerStruct(100))));
writer.addRow(new MyStruct(2,"f",true,Lists.newArrayList(new InnerStruct(100))));
writer.addRow(new MyStruct(2,"g",true,Lists.newArrayList(new InnerStruct(100))));
writer.addRow(new MyStruct(2,"h",true,Lists.newArrayList(new InnerStruct(100))));
writer.close();
// Statistics must count only the 7 non-null values per nullable column.
Reader reader=OrcFile.createReader(testFilePath,OrcFile.readerOptions(conf).filesystem(fs));
ColumnStatistics[] stats=reader.getStatistics();
assertEquals(8,reader.getNumberOfRows());
assertEquals(8,stats[0].getNumberOfValues());
assertEquals(3,((IntegerColumnStatistics)stats[1]).getMaximum());
assertEquals(2,((IntegerColumnStatistics)stats[1]).getMinimum());
assertEquals(true,((IntegerColumnStatistics)stats[1]).isSumDefined());
assertEquals(17,((IntegerColumnStatistics)stats[1]).getSum());
assertEquals("count: 7 hasNull: true min: 2 max: 3 sum: 17",stats[1].toString());
assertEquals("h",((StringColumnStatistics)stats[2]).getMaximum());
assertEquals("a",((StringColumnStatistics)stats[2]).getMinimum());
assertEquals(7,stats[2].getNumberOfValues());
assertEquals("count: 7 hasNull: true min: a max: h sum: 7",stats[2].toString());
StructObjectInspector readerInspector=(StructObjectInspector)reader.getObjectInspector();
assertEquals(ObjectInspector.Category.STRUCT,readerInspector.getCategory());
// NOTE(review): this type-name literal looks truncated (generics stripped in
// this copy) -- verify the expected string against upstream.
assertEquals("struct>>",readerInspector.getTypeName());
// Expect a PRESENT (null-tracking) stream only in the last stripe, since
// that is where the null-bearing rows land.
RecordReader rows=reader.rows();
List expected=Lists.newArrayList();
for ( StripeInformation sinfo : reader.getStripes()) {
expected.add(false);
}
expected.set(expected.size() - 1,true);
List got=Lists.newArrayList();
for ( StripeInformation sinfo : reader.getStripes()) {
OrcProto.StripeFooter sf=((RecordReaderImpl)rows).readStripeFooter(sinfo);
got.add(sf.toString().indexOf(OrcProto.Stream.Kind.PRESENT.toString()) != -1);
}
assertEquals(expected,got);
// Read the first three rows back and check values, including the nulls.
OrcStruct row=(OrcStruct)rows.next(null);
assertNotNull(row);
assertEquals(new IntWritable(3),row.getFieldValue(0));
assertEquals("a",row.getFieldValue(1).toString());
assertEquals(new BooleanWritable(true),row.getFieldValue(2));
assertEquals(new IntWritable(100),((OrcStruct)((ArrayList>)row.getFieldValue(3)).get(0)).getFieldValue(0));
row=(OrcStruct)rows.next(row);
assertNotNull(row);
assertNull(row.getFieldValue(0));
assertEquals("b",row.getFieldValue(1).toString());
assertEquals(new BooleanWritable(true),row.getFieldValue(2));
assertEquals(new IntWritable(100),((OrcStruct)((ArrayList>)row.getFieldValue(3)).get(0)).getFieldValue(0));
row=(OrcStruct)rows.next(row);
assertNotNull(row);
assertNull(row.getFieldValue(1));
assertEquals(new IntWritable(3),row.getFieldValue(0));
assertEquals(new BooleanWritable(false),row.getFieldValue(2));
assertEquals(new IntWritable(100),((OrcStruct)((ArrayList>)row.getFieldValue(3)).get(0)).getFieldValue(0));
rows.close();
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Writes 20000 rows with no nulls across multiple stripes and verifies that
 * no stripe carries a PRESENT (null-tracking) stream, that the statistics
 * report hasNull: false, and that the last two rows read back correctly.
 */
@Test public void testMultiStripeWithoutNull() throws Exception {
ObjectInspector inspector;
synchronized (TestOrcNullOptimization.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(MyStruct.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
Writer writer=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000).compress(CompressionKind.NONE).bufferSize(10000));
// rand.nextInt(1) is always 0, so rows 1..19999 are (0, "a", ...);
// the final row is (0, "b", ...) to give the string column a distinct max.
Random rand=new Random(100);
for (int i=1; i < 20000; i++) {
writer.addRow(new MyStruct(rand.nextInt(1),"a",true,Lists.newArrayList(new InnerStruct(100))));
}
writer.addRow(new MyStruct(0,"b",true,Lists.newArrayList(new InnerStruct(100))));
writer.close();
Reader reader=OrcFile.createReader(testFilePath,OrcFile.readerOptions(conf).filesystem(fs));
ColumnStatistics[] stats=reader.getStatistics();
assertEquals(20000,reader.getNumberOfRows());
assertEquals(20000,stats[0].getNumberOfValues());
assertEquals(0,((IntegerColumnStatistics)stats[1]).getMaximum());
assertEquals(0,((IntegerColumnStatistics)stats[1]).getMinimum());
assertEquals(true,((IntegerColumnStatistics)stats[1]).isSumDefined());
assertEquals(0,((IntegerColumnStatistics)stats[1]).getSum());
assertEquals("count: 20000 hasNull: false min: 0 max: 0 sum: 0",stats[1].toString());
assertEquals("b",((StringColumnStatistics)stats[2]).getMaximum());
assertEquals("a",((StringColumnStatistics)stats[2]).getMinimum());
assertEquals(20000,stats[2].getNumberOfValues());
assertEquals("count: 20000 hasNull: false min: a max: b sum: 20000",stats[2].toString());
StructObjectInspector readerInspector=(StructObjectInspector)reader.getObjectInspector();
assertEquals(ObjectInspector.Category.STRUCT,readerInspector.getCategory());
// NOTE(review): this type-name literal looks truncated (generics stripped in
// this copy) -- verify the expected string against upstream.
assertEquals("struct>>",readerInspector.getTypeName());
// With no nulls anywhere, no stripe footer should mention a PRESENT stream.
RecordReader rows=reader.rows();
List expected=Lists.newArrayList();
for ( StripeInformation sinfo : reader.getStripes()) {
expected.add(false);
}
List got=Lists.newArrayList();
for ( StripeInformation sinfo : reader.getStripes()) {
OrcProto.StripeFooter sf=((RecordReaderImpl)rows).readStripeFooter(sinfo);
got.add(sf.toString().indexOf(OrcProto.Stream.Kind.PRESENT.toString()) != -1);
}
assertEquals(expected,got);
// Seek to the penultimate row and verify the last two rows.
rows.seekToRow(19998);
OrcStruct row=(OrcStruct)rows.next(null);
assertNotNull(row);
assertNotNull(row.getFieldValue(1));
assertEquals(new IntWritable(0),row.getFieldValue(0));
assertEquals("a",row.getFieldValue(1).toString());
assertEquals(new BooleanWritable(true),row.getFieldValue(2));
assertEquals(new IntWritable(100),((OrcStruct)((ArrayList>)row.getFieldValue(3)).get(0)).getFieldValue(0));
row=(OrcStruct)rows.next(row);
assertNotNull(row);
assertNotNull(row.getFieldValue(0));
assertNotNull(row.getFieldValue(1));
assertEquals("b",row.getFieldValue(1).toString());
assertEquals(new BooleanWritable(true),row.getFieldValue(2));
assertEquals(new IntWritable(100),((OrcStruct)((ArrayList>)row.getFieldValue(3)).get(0)).getFieldValue(0));
rows.close();
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Writes 20000 rows where only the very first and very last rows contain
 * nulls, then verifies that exactly the first and last stripes carry a
 * PRESENT (null-tracking) stream and that the null rows read back as null.
 */
@Test public void testMultiStripeWithNull() throws Exception {
ObjectInspector inspector;
synchronized (TestOrcNullOptimization.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(MyStruct.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
Writer writer=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000).compress(CompressionKind.NONE).bufferSize(10000));
Random rand=new Random(100);
// First row: null int and string.
writer.addRow(new MyStruct(null,null,true,Lists.newArrayList(new InnerStruct(100))));
// Middle 19998 rows: rand.nextInt(1) is always 0, so every row is (0, "a", ...).
for (int i=2; i < 20000; i++) {
writer.addRow(new MyStruct(rand.nextInt(1),"a",true,Lists.newArrayList(new InnerStruct(100))));
}
// Last row: null int and string again.
writer.addRow(new MyStruct(null,null,true,Lists.newArrayList(new InnerStruct(100))));
writer.close();
// Statistics must count only the 19998 non-null values per nullable column.
Reader reader=OrcFile.createReader(testFilePath,OrcFile.readerOptions(conf).filesystem(fs));
ColumnStatistics[] stats=reader.getStatistics();
assertEquals(20000,reader.getNumberOfRows());
assertEquals(20000,stats[0].getNumberOfValues());
assertEquals(0,((IntegerColumnStatistics)stats[1]).getMaximum());
assertEquals(0,((IntegerColumnStatistics)stats[1]).getMinimum());
assertEquals(true,((IntegerColumnStatistics)stats[1]).isSumDefined());
assertEquals(0,((IntegerColumnStatistics)stats[1]).getSum());
assertEquals("count: 19998 hasNull: true min: 0 max: 0 sum: 0",stats[1].toString());
assertEquals("a",((StringColumnStatistics)stats[2]).getMaximum());
assertEquals("a",((StringColumnStatistics)stats[2]).getMinimum());
assertEquals(19998,stats[2].getNumberOfValues());
assertEquals("count: 19998 hasNull: true min: a max: a sum: 19998",stats[2].toString());
StructObjectInspector readerInspector=(StructObjectInspector)reader.getObjectInspector();
assertEquals(ObjectInspector.Category.STRUCT,readerInspector.getCategory());
// NOTE(review): this type-name literal looks truncated (generics stripped in
// this copy) -- verify the expected string against upstream.
assertEquals("struct>>",readerInspector.getTypeName());
// Only the first and last stripes contain null rows, so only they should
// carry a PRESENT stream in their footer.
RecordReader rows=reader.rows();
List expected=Lists.newArrayList();
for ( StripeInformation sinfo : reader.getStripes()) {
expected.add(false);
}
expected.set(0,true);
expected.set(expected.size() - 1,true);
List got=Lists.newArrayList();
for ( StripeInformation sinfo : reader.getStripes()) {
OrcProto.StripeFooter sf=((RecordReaderImpl)rows).readStripeFooter(sinfo);
got.add(sf.toString().indexOf(OrcProto.Stream.Kind.PRESENT.toString()) != -1);
}
assertEquals(expected,got);
// First row reads back with both nullable fields null.
OrcStruct row=(OrcStruct)rows.next(null);
assertNotNull(row);
assertNull(row.getFieldValue(0));
assertNull(row.getFieldValue(1));
assertEquals(new BooleanWritable(true),row.getFieldValue(2));
assertEquals(new IntWritable(100),((OrcStruct)((ArrayList>)row.getFieldValue(3)).get(0)).getFieldValue(0));
// Seek near the end: penultimate row is non-null, final row is null again.
rows.seekToRow(19998);
row=(OrcStruct)rows.next(null);
assertNotNull(row);
assertNotNull(row.getFieldValue(1));
assertEquals(new IntWritable(0),row.getFieldValue(0));
assertEquals(new BooleanWritable(true),row.getFieldValue(2));
assertEquals(new IntWritable(100),((OrcStruct)((ArrayList>)row.getFieldValue(3)).get(0)).getFieldValue(0));
row=(OrcStruct)rows.next(row);
assertNotNull(row);
assertNull(row.getFieldValue(0));
assertNull(row.getFieldValue(1));
assertEquals(new BooleanWritable(true),row.getFieldValue(2));
assertEquals(new IntWritable(100),((OrcStruct)((ArrayList>)row.getFieldValue(3)).get(0)).getFieldValue(0));
rows.close();
}
Class: org.apache.hadoop.hive.ql.io.orc.TestOrcRawRecordMerger APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes an ACID base file with zero rows (RecordUpdater opened and closed
 * with no inserts) and verifies OrcRawRecordMerger.next() immediately
 * reports end-of-data.
 */
@Test public void testEmpty() throws Exception {
final int BUCKET=0;
Configuration conf=new Configuration();
OrcOutputFormat of=new OrcOutputFormat();
FileSystem fs=FileSystem.getLocal(conf);
Path root=new Path(tmpDir,"testEmpty").makeQualified(fs);
fs.delete(root,true);
ObjectInspector inspector;
// Reflection object-inspector creation is serialized across tests via this lock.
synchronized (TestOrcFile.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(MyRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
// Base for transaction 100 — closed without writing a single row.
AcidOutputFormat.Options options=new AcidOutputFormat.Options(conf).inspector(inspector).bucket(BUCKET).writingBase(true).maximumTransactionId(100).finalDestination(root);
of.getRecordUpdater(root,options).close(false);
// All transactions up to 200 are considered valid when scanning the directory.
ValidTxnList txnList=new ValidReadTxnList("200:");
AcidUtils.Directory directory=AcidUtils.getAcidState(root,conf,txnList);
Path basePath=AcidUtils.createBucketFile(directory.getBaseDirectory(),BUCKET);
Reader baseReader=OrcFile.createReader(basePath,OrcFile.readerOptions(conf));
conf.set(IOConstants.SCHEMA_EVOLUTION_COLUMNS,MyRow.getColumnNamesProperty());
conf.set(IOConstants.SCHEMA_EVOLUTION_COLUMNS_TYPES,MyRow.getColumnTypesProperty());
HiveConf.setBoolVar(conf,HiveConf.ConfVars.HIVE_TRANSACTIONAL_TABLE_SCAN,true);
OrcRawRecordMerger merger=new OrcRawRecordMerger(conf,true,baseReader,false,BUCKET,createMaximalTxnList(),new Reader.Options(),AcidUtils.getPaths(directory.getCurrentDirectories()));
RecordIdentifier key=merger.createKey();
OrcStruct value=merger.createValue();
// Empty base plus no deltas: the very first next() must return false.
assertEquals(false,merger.next(key,value));
}
InternalCallVerifier EqualityVerifier
/**
 * Exercises OriginalReaderPair (pre-ACID "original" file wrapped as ACID
 * events) over a mocked reader with a min/max key window.  The assertions
 * are order-sensitive: constructing the pair positions it on the first
 * in-window record, and each next() advances the mocked reader.
 */
@Test public void testOriginalReaderPair() throws Exception {
ReaderKey key=new ReaderKey();
Reader reader=createMockOriginalReader();
// Window over bucket 10: rowIds above 1 up to and including 3 — TODO confirm
// exact inclusivity; the values observed below imply min-exclusive/max-inclusive.
RecordIdentifier minKey=new RecordIdentifier(0,10,1);
RecordIdentifier maxKey=new RecordIdentifier(0,10,3);
boolean[] includes=new boolean[]{true,true};
ReaderPair pair=new OriginalReaderPair(key,reader,10,minKey,maxKey,new Reader.Options().include(includes));
RecordReader recordReader=pair.recordReader;
// First record inside the window: original rows carry transaction 0, rowId 2 = "third".
assertEquals(0,key.getTransactionId());
assertEquals(10,key.getBucketId());
assertEquals(2,key.getRowId());
assertEquals(0,key.getCurrentTransactionId());
assertEquals("third",value(pair.nextRecord));
pair.next(pair.nextRecord);
// rowId 3 equals maxKey and is still returned.
assertEquals(0,key.getTransactionId());
assertEquals(10,key.getBucketId());
assertEquals(3,key.getRowId());
assertEquals(0,key.getCurrentTransactionId());
assertEquals("fourth",value(pair.nextRecord));
pair.next(pair.nextRecord);
// Past maxKey: the pair is exhausted and must close its record reader.
assertEquals(null,pair.nextRecord);
Mockito.verify(recordReader).close();
}
InternalCallVerifier EqualityVerifier
/**
 * Exercises ReaderPair (ACID event file) over a mocked reader with a
 * min/max key window.  Order-sensitive: construction seeks to the first
 * in-window event, each next() advances the mock.
 */
@Test public void testReaderPair() throws Exception {
ReaderKey key=new ReaderKey();
Reader reader=createMockReader();
// Window (10,20,30)..(40,50,60); the mock's first two events fall below it.
RecordIdentifier minKey=new RecordIdentifier(10,20,30);
RecordIdentifier maxKey=new RecordIdentifier(40,50,60);
ReaderPair pair=new ReaderPair(key,reader,20,minKey,maxKey,new Reader.Options(),0);
RecordReader recordReader=pair.recordReader;
// First event after minKey: (10,20,40) at current transaction 120 = "third".
assertEquals(10,key.getTransactionId());
assertEquals(20,key.getBucketId());
assertEquals(40,key.getRowId());
assertEquals(120,key.getCurrentTransactionId());
assertEquals("third",value(pair.nextRecord));
pair.next(pair.nextRecord);
// (40,50,60) equals maxKey and is still returned.
assertEquals(40,key.getTransactionId());
assertEquals(50,key.getBucketId());
assertEquals(60,key.getRowId());
assertEquals(130,key.getCurrentTransactionId());
assertEquals("fourth",value(pair.nextRecord));
pair.next(pair.nextRecord);
// Beyond maxKey: pair exhausted, reader closed.
assertEquals(null,pair.nextRecord);
Mockito.verify(recordReader).close();
}
IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Test the RecordReader when there are two insert deltas and no base:
 * transaction 1 inserts a..e and transaction 2 inserts f..j, and a scan
 * must return all ten rows in order.
 * @throws Exception
 */
@Test public void testRecordReaderDelta() throws Exception {
final int BUCKET=0;
Configuration conf=new Configuration();
OrcOutputFormat of=new OrcOutputFormat();
FileSystem fs=FileSystem.getLocal(conf);
Path root=new Path(tmpDir,"testRecordReaderDelta").makeQualified(fs);
fs.delete(root,true);
ObjectInspector inspector;
// Reflection inspector creation is serialized across tests via this lock.
synchronized (TestOrcFile.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(MyRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
// Delta for transaction 1 (writingBase(false)) with rows a..e.
AcidOutputFormat.Options options=new AcidOutputFormat.Options(conf).bucket(BUCKET).inspector(inspector).filesystem(fs).writingBase(false).minimumTransactionId(1).maximumTransactionId(1).finalDestination(root);
RecordUpdater ru=of.getRecordUpdater(root,options);
String[] values=new String[]{"a","b","c","d","e"};
for (int i=0; i < values.length; ++i) {
ru.insert(1,new MyRow(values[i]));
}
ru.close(false);
// Second delta for transaction 2 with rows f..j.
options.minimumTransactionId(2).maximumTransactionId(2);
ru=of.getRecordUpdater(root,options);
values=new String[]{"f","g","h","i","j"};
for (int i=0; i < values.length; ++i) {
ru.insert(2,new MyRow(values[i]));
}
ru.close(false);
InputFormat inf=new OrcInputFormat();
JobConf job=new JobConf();
// Tiny split sizes to stress split generation; still expect a single split.
job.set("mapred.min.split.size","1");
job.set("mapred.max.split.size","2");
job.set("mapred.input.dir",root.toString());
job.set("bucket_count","1");
job.set(IOConstants.SCHEMA_EVOLUTION_COLUMNS,MyRow.getColumnNamesProperty());
job.set(IOConstants.SCHEMA_EVOLUTION_COLUMNS_TYPES,MyRow.getColumnTypesProperty());
HiveConf.setBoolVar(job,HiveConf.ConfVars.HIVE_TRANSACTIONAL_TABLE_SCAN,true);
InputSplit[] splits=inf.getSplits(job,5);
assertEquals(1,splits.length);
org.apache.hadoop.mapred.RecordReader rr;
rr=inf.getRecordReader(splits[0],job,Reporter.NULL);
// The merged scan must return the rows of both deltas, in insertion order.
values=new String[]{"a","b","c","d","e","f","g","h","i","j"};
OrcStruct row=rr.createValue();
for (int i=0; i < values.length; ++i) {
System.out.println("Checking " + i);
assertEquals(true,rr.next(NullWritable.get(),row));
assertEquals(values[i],row.getFieldValue(0).toString());
}
assertEquals(false,rr.next(NullWritable.get(),row));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Drives OrcRawRecordMerger over a fully mocked ACID base reader: builds the
 * ACID event type tree by hand, stubs five events and a key index, and checks
 * that a range-restricted merger returns only the events between the min and
 * max keys derived from that index.  Heavily order-sensitive — Mockito
 * consecutive stubbing (thenReturn(row1,row4), next(row1)->row2, ...) encodes
 * the exact read sequence.
 */
@Test public void testNewBase() throws Exception {
Configuration conf=new Configuration();
conf.set(IOConstants.SCHEMA_EVOLUTION_COLUMNS,"col1");
conf.set(IOConstants.SCHEMA_EVOLUTION_COLUMNS_TYPES,"string");
HiveConf.setBoolVar(conf,HiveConf.ConfVars.HIVE_TRANSACTIONAL_TABLE_SCAN,true);
Reader reader=Mockito.mock(Reader.class,settings);
RecordReader recordReader=Mockito.mock(RecordReader.class,settings);
// Hand-build the ORC type tree of an ACID event struct:
// struct<operation,originalTransaction,bucket,rowId,currentTransaction,row<col1:string>>.
List types=new ArrayList();
OrcProto.Type.Builder typeBuilder=OrcProto.Type.newBuilder();
typeBuilder.setKind(OrcProto.Type.Kind.STRUCT).addSubtypes(1).addSubtypes(2).addSubtypes(3).addSubtypes(4).addSubtypes(5).addSubtypes(6);
typeBuilder.addAllFieldNames(Lists.newArrayList("operation","originalTransaction","bucket","rowId","currentTransaction","row"));
types.add(typeBuilder.build());
// Entries 1..5 are primitive fields whose exact types the merger does not inspect here.
types.add(null);
types.add(null);
types.add(null);
types.add(null);
types.add(null);
typeBuilder.clearSubtypes();
typeBuilder.addSubtypes(7);
typeBuilder.addAllFieldNames(Lists.newArrayList("col1"));
types.add(typeBuilder.build());
typeBuilder.clear();
typeBuilder.setKind(OrcProto.Type.Kind.STRING);
types.add(typeBuilder.build());
Mockito.when(reader.getTypes()).thenReturn(types);
Mockito.when(reader.rowsOptions(Mockito.any(Reader.Options.class))).thenReturn(recordReader);
// Five insert events; rows 1-3 share key prefix (10,20,*), rows 4-5 (40,50,*).
OrcStruct row1=new OrcStruct(OrcRecordUpdater.FIELDS);
setRow(row1,OrcRecordUpdater.INSERT_OPERATION,10,20,20,100,"first");
OrcStruct row2=new OrcStruct(OrcRecordUpdater.FIELDS);
setRow(row2,OrcRecordUpdater.INSERT_OPERATION,10,20,30,110,"second");
OrcStruct row3=new OrcStruct(OrcRecordUpdater.FIELDS);
setRow(row3,OrcRecordUpdater.INSERT_OPERATION,10,20,40,120,"third");
OrcStruct row4=new OrcStruct(OrcRecordUpdater.FIELDS);
setRow(row4,OrcRecordUpdater.INSERT_OPERATION,40,50,60,130,"fourth");
OrcStruct row5=new OrcStruct(OrcRecordUpdater.FIELDS);
setRow(row5,OrcRecordUpdater.INSERT_OPERATION,40,50,61,140,"fifth");
// Consecutive stubbing: next(null) is called on two separate seeks (row1, then row4).
Mockito.when(recordReader.hasNext()).thenReturn(true,true,true,true,true,false);
Mockito.when(recordReader.getProgress()).thenReturn(1.0f);
Mockito.when(recordReader.next(null)).thenReturn(row1,row4);
Mockito.when(recordReader.next(row1)).thenReturn(row2);
Mockito.when(recordReader.next(row2)).thenReturn(row3);
Mockito.when(recordReader.next(row3)).thenReturn(row5);
// Per-stripe last keys; combined with the 1000..1000 byte range this yields
// minKey (10,20,30) and maxKey (40,50,60) below.
Mockito.when(reader.getMetadataValue(OrcRecordUpdater.ACID_KEY_INDEX_NAME)).thenReturn(ByteBuffer.wrap("10,20,30;40,50,60;40,50,61".getBytes("UTF-8")));
Mockito.when(reader.getStripes()).thenReturn(createStripes(2,2,1));
OrcRawRecordMerger merger=new OrcRawRecordMerger(conf,false,reader,false,10,createMaximalTxnList(),new Reader.Options().range(1000,1000),null);
RecordReader rr=merger.getCurrentReader().recordReader;
assertEquals(0,merger.getOtherReaders().size());
assertEquals(new RecordIdentifier(10,20,30),merger.getMinKey());
assertEquals(new RecordIdentifier(40,50,60),merger.getMaxKey());
RecordIdentifier id=merger.createKey();
OrcStruct event=merger.createValue();
// Only "third" (after minKey) and "fourth" (== maxKey) are in the window.
assertEquals(true,merger.next(id,event));
assertEquals(10,id.getTransactionId());
assertEquals(20,id.getBucketId());
assertEquals(40,id.getRowId());
assertEquals("third",getValue(event));
assertEquals(true,merger.next(id,event));
assertEquals(40,id.getTransactionId());
assertEquals(50,id.getBucketId());
assertEquals(60,id.getRowId());
assertEquals("fourth",getValue(event));
assertEquals(false,merger.next(id,event));
assertEquals(1.0,merger.getProgress(),0.01);
merger.close();
Mockito.verify(rr).close();
Mockito.verify(rr).getProgress();
// The merger's object inspector must expose the ACID event fields by name.
StructObjectInspector eventObjectInspector=(StructObjectInspector)merger.getObjectInspector();
List extends StructField> fields=eventObjectInspector.getAllStructFieldRefs();
assertEquals(OrcRecordUpdater.FIELDS,fields.size());
assertEquals("operation",fields.get(OrcRecordUpdater.OPERATION).getFieldName());
assertEquals("currentTransaction",fields.get(OrcRecordUpdater.CURRENT_TRANSACTION).getFieldName());
assertEquals("originalTransaction",fields.get(OrcRecordUpdater.ORIGINAL_TRANSACTION).getFieldName());
assertEquals("bucket",fields.get(OrcRecordUpdater.BUCKET).getFieldName());
assertEquals("rowId",fields.get(OrcRecordUpdater.ROW_ID).getFieldName());
StructObjectInspector rowObjectInspector=(StructObjectInspector)fields.get(OrcRecordUpdater.ROW).getFieldObjectInspector();
assertEquals("col1",rowObjectInspector.getAllStructFieldRefs().get(0).getFieldName());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Checks ReaderKey ordering and equality: keys compare by
 * (transactionId, bucketId, rowId, currentTransactionId) precedence, and
 * a plain RecordIdentifier never equals (and sorts after) a ReaderKey with
 * the same id fields.  Order-sensitive — left/right are mutated between
 * assertion groups.
 */
@Test public void testOrdering() throws Exception {
ReaderKey left=new ReaderKey(100,200,1200,300);
ReaderKey right=new ReaderKey();
right.setValues(100,200,1000,200,1);
// Same txn/bucket, smaller rowId -> right sorts first; keys are not equal.
assertTrue(right.compareTo(left) < 0);
assertTrue(left.compareTo(right) > 0);
assertEquals(false,left.equals(right));
// Copying makes them compare equal and equals()-equal.
left.set(right);
assertTrue(right.compareTo(left) == 0);
assertEquals(true,right.equals(left));
right.setRowId(2000);
assertTrue(right.compareTo(left) > 0);
// transactionId dominates all later fields.
left.setValues(1,2,3,4,-1);
right.setValues(100,2,3,4,-1);
assertTrue(left.compareTo(right) < 0);
assertTrue(right.compareTo(left) > 0);
// then bucketId...
left.setValues(1,2,3,4,-1);
right.setValues(1,100,3,4,-1);
assertTrue(left.compareTo(right) < 0);
assertTrue(right.compareTo(left) > 0);
// ...and currentTransactionId breaks the final tie.
left.setValues(1,2,3,100,-1);
right.setValues(1,2,3,4,-1);
assertTrue(left.compareTo(right) < 0);
assertTrue(right.compareTo(left) > 0);
// A bare RecordIdentifier with equal id fields is ordered after a ReaderKey
// and is never equals() to one (asymmetry is intentional here).
RecordIdentifier ri=new RecordIdentifier(1,2,3);
assertEquals(1,ri.compareTo(left));
assertEquals(-1,left.compareTo(ri));
assertEquals(false,ri.equals(left));
assertEquals(false,left.equals(ri));
}
IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Test the RecordReader when there is a new (ACID-format) base and deltas:
 * a base of ten rows, then two update/delete deltas (transactions 1 and 2),
 * read back through OrcInputFormat with tiny splits so each of the first
 * four splits yields exactly two surviving rows.
 * @throws Exception
 */
@Test public void testRecordReaderNewBaseAndDelta() throws Exception {
final int BUCKET=11;
Configuration conf=new Configuration();
OrcOutputFormat of=new OrcOutputFormat();
FileSystem fs=FileSystem.getLocal(conf);
Path root=new Path(tmpDir,"testRecordReaderNewBaseAndDelta").makeQualified(fs);
fs.delete(root,true);
ObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(BigRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
// Memory manager that forces a writer flush check every 2 rows, producing
// many tiny stripes so split generation has something to cut on.
MemoryManager mgr=new MemoryManager(conf){
int rowsAddedSinceCheck=0;
@Override public synchronized void addedRow( int rows) throws IOException {
rowsAddedSinceCheck+=rows;
if (rowsAddedSinceCheck >= 2) {
notifyWriters();
rowsAddedSinceCheck=0;
}
}
}
;
// ACID base (transaction 0) with ten rows; the "ignore.*" values are the
// ones later overwritten or deleted by the deltas.
OrcRecordUpdater.OrcOptions options=(OrcRecordUpdater.OrcOptions)new OrcRecordUpdater.OrcOptions(conf).writingBase(true).minimumTransactionId(0).maximumTransactionId(0).bucket(BUCKET).inspector(inspector).filesystem(fs);
options.orcOptions(OrcFile.writerOptions(conf).stripeSize(1).blockPadding(false).compress(CompressionKind.NONE).memory(mgr).batchSize(2));
options.finalDestination(root);
RecordUpdater ru=of.getRecordUpdater(root,options);
String[] values=new String[]{"ignore.1","0.1","ignore.2","ignore.3","2.0","2.1","3.0","ignore.4","ignore.5","ignore.6"};
for (int i=0; i < values.length; ++i) {
ru.insert(0,new BigRow(i,i,values[i],i,i));
}
ru.close(false);
// Delta for transaction 1: updates at the non-null positions, plus a delete of row 9.
options.writingBase(false).minimumTransactionId(1).maximumTransactionId(1).recordIdColumn(5);
ru=of.getRecordUpdater(root,options);
values=new String[]{"0.0",null,null,"1.1",null,null,null,"ignore.7"};
for (int i=0; i < values.length; ++i) {
if (values[i] != null) {
ru.update(1,new BigRow(i,i,values[i],i,i,i,0,BUCKET));
}
}
ru.delete(100,new BigRow(9,0,BUCKET));
ru.close(false);
// Delta for transaction 2: more updates (including overwriting txn 1's "ignore.7"),
// plus a delete of row 8.
options.minimumTransactionId(2).maximumTransactionId(2);
ru=of.getRecordUpdater(root,options);
values=new String[]{null,null,"1.0",null,null,null,null,"3.1"};
for (int i=0; i < values.length; ++i) {
if (values[i] != null) {
ru.update(2,new BigRow(i,i,values[i],i,i,i,0,BUCKET));
}
}
ru.delete(100,new BigRow(8,0,BUCKET));
ru.close(false);
InputFormat inf=new OrcInputFormat();
JobConf job=new JobConf();
job.set("mapred.min.split.size","1");
job.set("mapred.max.split.size","2");
job.set("mapred.input.dir",root.toString());
job.set(IOConstants.SCHEMA_EVOLUTION_COLUMNS,BigRow.getColumnNamesProperty());
job.set(IOConstants.SCHEMA_EVOLUTION_COLUMNS_TYPES,BigRow.getColumnTypesProperty());
HiveConf.setBoolVar(job,HiveConf.ConfVars.HIVE_TRANSACTIONAL_TABLE_SCAN,true);
InputSplit[] splits=inf.getSplits(job,5);
assertEquals(5,splits.length);
org.apache.hadoop.mapred.RecordReader rr;
// Splits 0..3 each produce two rows "i.0" and "i.1" (the post-merge values).
for (int i=0; i < 4; ++i) {
System.out.println("starting split " + i + " = "+ splits[i]);
rr=inf.getRecordReader(splits[i],job,Reporter.NULL);
NullWritable key=rr.createKey();
OrcStruct value=rr.createValue();
for (int j=0; j < 2; ++j) {
System.out.println("i = " + i + ", j = "+ j);
assertEquals(true,rr.next(key,value));
System.out.println("record = " + value);
assertEquals(i + "." + j,value.getFieldValue(2).toString());
}
assertEquals(false,rr.next(key,value));
}
// The last split covers only deleted/overwritten rows and yields nothing.
rr=inf.getRecordReader(splits[4],job,Reporter.NULL);
assertEquals(false,rr.next(rr.createKey(),rr.createValue()));
}
InternalCallVerifier EqualityVerifier
/**
 * OriginalReaderPair with no min/max key window: all five mocked original
 * rows are returned in order with transaction 0 and sequential rowIds,
 * after which the pair exhausts and closes its reader.  Order-sensitive.
 */
@Test public void testOriginalReaderPairNoMin() throws Exception {
ReaderKey key=new ReaderKey();
Reader reader=createMockOriginalReader();
// null min/max keys => no filtering; bucket is forced to 10.
ReaderPair pair=new OriginalReaderPair(key,reader,10,null,null,new Reader.Options());
assertEquals("first",value(pair.nextRecord));
assertEquals(0,key.getTransactionId());
assertEquals(10,key.getBucketId());
assertEquals(0,key.getRowId());
assertEquals(0,key.getCurrentTransactionId());
pair.next(pair.nextRecord);
assertEquals("second",value(pair.nextRecord));
assertEquals(0,key.getTransactionId());
assertEquals(10,key.getBucketId());
assertEquals(1,key.getRowId());
assertEquals(0,key.getCurrentTransactionId());
pair.next(pair.nextRecord);
assertEquals("third",value(pair.nextRecord));
assertEquals(0,key.getTransactionId());
assertEquals(10,key.getBucketId());
assertEquals(2,key.getRowId());
assertEquals(0,key.getCurrentTransactionId());
pair.next(pair.nextRecord);
assertEquals("fourth",value(pair.nextRecord));
assertEquals(0,key.getTransactionId());
assertEquals(10,key.getBucketId());
assertEquals(3,key.getRowId());
assertEquals(0,key.getCurrentTransactionId());
pair.next(pair.nextRecord);
assertEquals("fifth",value(pair.nextRecord));
assertEquals(0,key.getTransactionId());
assertEquals(10,key.getBucketId());
assertEquals(4,key.getRowId());
assertEquals(0,key.getCurrentTransactionId());
pair.next(pair.nextRecord);
// Exhausted: nextRecord is null and the underlying reader must be closed.
assertEquals(null,pair.nextRecord);
Mockito.verify(pair.recordReader).close();
}
IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Test the OrcRecordUpdater with the OrcRawRecordMerger when there is
 * an old-style (pre-ACID, plain ORC "0000010_0") base and two ACID
 * deltas.  Mirrors testRecordReaderNewBaseAndDelta but with an original
 * base written by a plain Writer instead of a RecordUpdater.
 * @throws Exception
 */
@Test public void testRecordReaderOldBaseAndDelta() throws Exception {
final int BUCKET=10;
Configuration conf=new Configuration();
OrcOutputFormat of=new OrcOutputFormat();
FileSystem fs=FileSystem.getLocal(conf);
Path root=new Path(tmpDir,"testOldBaseAndDelta").makeQualified(fs);
fs.delete(root,true);
ObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(BigRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
// Force a writer flush check every 2 rows so the base gets many tiny stripes.
MemoryManager mgr=new MemoryManager(conf){
int rowsAddedSinceCheck=0;
@Override public synchronized void addedRow( int rows) throws IOException {
rowsAddedSinceCheck+=rows;
if (rowsAddedSinceCheck >= 2) {
notifyWriters();
rowsAddedSinceCheck=0;
}
}
}
;
// Pre-ACID base written as a plain ORC file named like an original bucket file.
Writer writer=OrcFile.createWriter(new Path(root,"0000010_0"),OrcFile.writerOptions(conf).inspector(inspector).fileSystem(fs).blockPadding(false).bufferSize(10000).compress(CompressionKind.NONE).stripeSize(1).memory(mgr).batchSize(2).version(OrcFile.Version.V_0_11));
String[] values=new String[]{"ignore.1","0.1","ignore.2","ignore.3","2.0","2.1","3.0","ignore.4","ignore.5","ignore.6"};
for (int i=0; i < values.length; ++i) {
writer.addRow(new BigRow(i,i,values[i],i,i));
}
writer.close();
// Delta for transaction 1: updates at non-null positions plus delete of row 9.
AcidOutputFormat.Options options=new AcidOutputFormat.Options(conf).writingBase(false).minimumTransactionId(1).maximumTransactionId(1).bucket(BUCKET).inspector(inspector).filesystem(fs).recordIdColumn(5).finalDestination(root);
RecordUpdater ru=of.getRecordUpdater(root,options);
values=new String[]{"0.0",null,null,"1.1",null,null,null,"ignore.7"};
for (int i=0; i < values.length; ++i) {
if (values[i] != null) {
ru.update(1,new BigRow(i,i,values[i],i,i,i,0,BUCKET));
}
}
ru.delete(100,new BigRow(9,0,BUCKET));
ru.close(false);
// Delta for transaction 2: further updates plus delete of row 8.
options=options.minimumTransactionId(2).maximumTransactionId(2);
ru=of.getRecordUpdater(root,options);
values=new String[]{null,null,"1.0",null,null,null,null,"3.1"};
for (int i=0; i < values.length; ++i) {
if (values[i] != null) {
ru.update(2,new BigRow(i,i,values[i],i,i,i,0,BUCKET));
}
}
ru.delete(100,new BigRow(8,0,BUCKET));
ru.close(false);
InputFormat inf=new OrcInputFormat();
JobConf job=new JobConf();
job.set(IOConstants.SCHEMA_EVOLUTION_COLUMNS,BigRow.getColumnNamesProperty());
job.set(IOConstants.SCHEMA_EVOLUTION_COLUMNS_TYPES,BigRow.getColumnTypesProperty());
HiveConf.setBoolVar(job,HiveConf.ConfVars.HIVE_TRANSACTIONAL_TABLE_SCAN,true);
// Tiny split sizes so the base splits into five pieces.
job.set("mapred.min.split.size","1");
job.set("mapred.max.split.size","2");
job.set("mapred.input.dir",root.toString());
InputSplit[] splits=inf.getSplits(job,5);
assertEquals(5,splits.length);
org.apache.hadoop.mapred.RecordReader rr;
// Splits 0..3 each yield the two post-merge rows "i.0" and "i.1".
for (int i=0; i < 4; ++i) {
System.out.println("starting split " + i);
rr=inf.getRecordReader(splits[i],job,Reporter.NULL);
NullWritable key=rr.createKey();
OrcStruct value=rr.createValue();
for (int j=0; j < 2; ++j) {
System.out.println("i = " + i + ", j = "+ j);
assertEquals(true,rr.next(key,value));
System.out.println("record = " + value);
assertEquals(i + "." + j,value.getFieldValue(2).toString());
}
assertEquals(false,rr.next(key,value));
}
// The last split's rows were all deleted or overwritten.
rr=inf.getRecordReader(splits[4],job,Reporter.NULL);
assertEquals(false,rr.next(rr.createKey(),rr.createValue()));
}
InternalCallVerifier EqualityVerifier
/**
 * ReaderPair with no min/max key window: all five mocked ACID events are
 * returned in order with their own transaction/bucket/rowId values, then
 * the pair exhausts and closes its reader.  Order-sensitive.
 */
@Test public void testReaderPairNoMin() throws Exception {
ReaderKey key=new ReaderKey();
Reader reader=createMockReader();
// null min/max keys => no filtering.
ReaderPair pair=new ReaderPair(key,reader,20,null,null,new Reader.Options(),0);
RecordReader recordReader=pair.recordReader;
assertEquals(10,key.getTransactionId());
assertEquals(20,key.getBucketId());
assertEquals(20,key.getRowId());
assertEquals(100,key.getCurrentTransactionId());
assertEquals("first",value(pair.nextRecord));
pair.next(pair.nextRecord);
assertEquals(10,key.getTransactionId());
assertEquals(20,key.getBucketId());
assertEquals(30,key.getRowId());
assertEquals(110,key.getCurrentTransactionId());
assertEquals("second",value(pair.nextRecord));
pair.next(pair.nextRecord);
assertEquals(10,key.getTransactionId());
assertEquals(20,key.getBucketId());
assertEquals(40,key.getRowId());
assertEquals(120,key.getCurrentTransactionId());
assertEquals("third",value(pair.nextRecord));
pair.next(pair.nextRecord);
assertEquals(40,key.getTransactionId());
assertEquals(50,key.getBucketId());
assertEquals(60,key.getRowId());
assertEquals(130,key.getCurrentTransactionId());
assertEquals("fourth",value(pair.nextRecord));
pair.next(pair.nextRecord);
assertEquals(40,key.getTransactionId());
assertEquals(50,key.getBucketId());
assertEquals(61,key.getRowId());
assertEquals(140,key.getCurrentTransactionId());
assertEquals("fifth",value(pair.nextRecord));
pair.next(pair.nextRecord);
// Exhausted: nextRecord is null and the reader must be closed.
assertEquals(null,pair.nextRecord);
Mockito.verify(recordReader).close();
}
Class: org.apache.hadoop.hive.ql.io.orc.TestOrcRecordUpdater APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Writes one update and one delete event through OrcRecordUpdater and reads
 * the resulting delta back, checking the ACID event columns (operation,
 * transactions, bucket, rowId) and that a delete event carries a null row.
 */
@Test public void testUpdates() throws Exception {
Path root=new Path(workDir,"testUpdates");
Configuration conf=new Configuration();
FileSystem fs=root.getFileSystem(conf);
ObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(MyRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
int bucket=20;
// Delta for transaction 100; column 1 of MyRow carries the record id.
AcidOutputFormat.Options options=new AcidOutputFormat.Options(conf).filesystem(fs).bucket(bucket).writingBase(false).minimumTransactionId(100).maximumTransactionId(100).inspector(inspector).reporter(Reporter.NULL).recordIdColumn(1).finalDestination(root);
RecordUpdater updater=new OrcRecordUpdater(root,options);
// Update original (txn 10, rowId 30) and delete original (txn 40, rowId 60).
updater.update(100,new MyRow("update",30,10,bucket));
updater.delete(100,new MyRow("",60,40,bucket));
// Row count is unknown (-1) until the updater is closed.
assertEquals(-1L,updater.getStats().getRowCount());
updater.close(false);
Path bucketPath=AcidUtils.createFilename(root,options);
Reader reader=OrcFile.createReader(bucketPath,new OrcFile.ReaderOptions(conf).filesystem(fs));
assertEquals(2,reader.getNumberOfRows());
RecordReader rows=reader.rows();
// First event: the update, with the original row's transaction/rowId and new data.
assertEquals(true,rows.hasNext());
OrcStruct row=(OrcStruct)rows.next(null);
assertEquals(OrcRecordUpdater.UPDATE_OPERATION,OrcRecordUpdater.getOperation(row));
assertEquals(100,OrcRecordUpdater.getCurrentTransaction(row));
assertEquals(10,OrcRecordUpdater.getOriginalTransaction(row));
assertEquals(20,OrcRecordUpdater.getBucket(row));
assertEquals(30,OrcRecordUpdater.getRowId(row));
assertEquals("update",OrcRecordUpdater.getRow(row).getFieldValue(0).toString());
// Second event: the delete — same addressing fields but no row payload.
assertEquals(true,rows.hasNext());
row=(OrcStruct)rows.next(null);
assertEquals(100,OrcRecordUpdater.getCurrentTransaction(row));
assertEquals(40,OrcRecordUpdater.getOriginalTransaction(row));
assertEquals(20,OrcRecordUpdater.getBucket(row));
assertEquals(60,OrcRecordUpdater.getRowId(row));
assertNull(OrcRecordUpdater.getRow(row));
assertEquals(false,rows.hasNext());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Exercises OrcRecordUpdater's flush/side-file protocol: each flush() appends
 * the current valid length to the "_flush_length" side file, so a reader
 * bounded by maxLength sees exactly the rows flushed so far; close() removes
 * the side file and finalizes the delta.
 */
@Test public void testWriter() throws Exception {
Path root=new Path(workDir,"testWriter");
Configuration conf=new Configuration();
// Raw local FS so the side file is visible without checksum shadow files.
FileSystem fs=FileSystem.getLocal(conf).getRaw();
ObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(MyRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
AcidOutputFormat.Options options=new AcidOutputFormat.Options(conf).filesystem(fs).bucket(10).writingBase(false).minimumTransactionId(10).maximumTransactionId(19).inspector(inspector).reporter(Reporter.NULL).finalDestination(root);
RecordUpdater updater=new OrcRecordUpdater(root,options);
// Three inserts in txn 11, flush; two more in txn 12, flush.
updater.insert(11,new MyRow("first"));
updater.insert(11,new MyRow("second"));
updater.insert(11,new MyRow("third"));
updater.flush();
updater.insert(12,new MyRow("fourth"));
updater.insert(12,new MyRow("fifth"));
updater.flush();
assertEquals(5L,updater.getStats().getRowCount());
Path bucketPath=AcidUtils.createFilename(root,options);
Path sidePath=OrcRecordUpdater.getSideFile(bucketPath);
DataInputStream side=fs.open(sidePath);
// First recorded length covers only the first flush (3 rows).
long len=side.readLong();
Reader reader=OrcFile.createReader(bucketPath,new OrcFile.ReaderOptions(conf).filesystem(fs).maxLength(len));
assertEquals(3,reader.getNumberOfRows());
// Second recorded length covers both flushes (5 rows).
len=side.readLong();
side.close();
reader=OrcFile.createReader(bucketPath,new OrcFile.ReaderOptions(conf).filesystem(fs).maxLength(len));
assertEquals(5,reader.getNumberOfRows());
RecordReader rows=reader.rows();
// Rows carry per-transaction rowIds starting at 0 for each transaction.
assertEquals(true,rows.hasNext());
OrcStruct row=(OrcStruct)rows.next(null);
assertEquals(OrcRecordUpdater.INSERT_OPERATION,OrcRecordUpdater.getOperation(row));
assertEquals(11,OrcRecordUpdater.getCurrentTransaction(row));
assertEquals(11,OrcRecordUpdater.getOriginalTransaction(row));
assertEquals(10,OrcRecordUpdater.getBucket(row));
assertEquals(0,OrcRecordUpdater.getRowId(row));
assertEquals("first",OrcRecordUpdater.getRow(row).getFieldValue(0).toString());
assertEquals(true,rows.hasNext());
row=(OrcStruct)rows.next(null);
assertEquals(1,OrcRecordUpdater.getRowId(row));
assertEquals(10,OrcRecordUpdater.getBucket(row));
assertEquals("second",OrcRecordUpdater.getRow(row).getFieldValue(0).toString());
assertEquals(true,rows.hasNext());
row=(OrcStruct)rows.next(null);
assertEquals(2,OrcRecordUpdater.getRowId(row));
assertEquals(10,OrcRecordUpdater.getBucket(row));
assertEquals("third",OrcRecordUpdater.getRow(row).getFieldValue(0).toString());
assertEquals(true,rows.hasNext());
row=(OrcStruct)rows.next(null);
// Transaction 12's rows restart rowId numbering at 0.
assertEquals(12,OrcRecordUpdater.getCurrentTransaction(row));
assertEquals(12,OrcRecordUpdater.getOriginalTransaction(row));
assertEquals(10,OrcRecordUpdater.getBucket(row));
assertEquals(0,OrcRecordUpdater.getRowId(row));
assertEquals("fourth",OrcRecordUpdater.getRow(row).getFieldValue(0).toString());
assertEquals(true,rows.hasNext());
row=(OrcStruct)rows.next(null);
assertEquals(1,OrcRecordUpdater.getRowId(row));
assertEquals("fifth",OrcRecordUpdater.getRow(row).getFieldValue(0).toString());
assertEquals(false,rows.hasNext());
// A sixth insert followed by close(): the finalized file has all 6 rows
// and the side file is gone.
updater.insert(20,new MyRow("sixth"));
updater.close(false);
reader=OrcFile.createReader(bucketPath,new OrcFile.ReaderOptions(conf).filesystem(fs));
assertEquals(6,reader.getNumberOfRows());
assertEquals(6L,updater.getStats().getRowCount());
assertEquals(false,fs.exists(sidePath));
}
InternalCallVerifier EqualityVerifier
/**
 * Round-trips each ACID event column through an OrcStruct: populate the
 * struct via the OrcRecordUpdater field indexes, then confirm every static
 * accessor reads back the value that was stored.
 */
@Test public void testAccessors() throws Exception {
OrcStruct acidEvent=new OrcStruct(OrcRecordUpdater.FIELDS);
// Populate the event columns (each setFieldValue targets a distinct slot).
acidEvent.setFieldValue(OrcRecordUpdater.ROW_ID,new LongWritable(300));
acidEvent.setFieldValue(OrcRecordUpdater.BUCKET,new IntWritable(200));
acidEvent.setFieldValue(OrcRecordUpdater.ORIGINAL_TRANSACTION,new LongWritable(50));
acidEvent.setFieldValue(OrcRecordUpdater.CURRENT_TRANSACTION,new LongWritable(100));
acidEvent.setFieldValue(OrcRecordUpdater.OPERATION,new IntWritable(OrcRecordUpdater.INSERT_OPERATION));
// Every accessor must return exactly what was stored in its column.
assertEquals(OrcRecordUpdater.INSERT_OPERATION,OrcRecordUpdater.getOperation(acidEvent));
assertEquals(50,OrcRecordUpdater.getOriginalTransaction(acidEvent));
assertEquals(100,OrcRecordUpdater.getCurrentTransaction(acidEvent));
assertEquals(200,OrcRecordUpdater.getBucket(acidEvent));
assertEquals(300,OrcRecordUpdater.getRowId(acidEvent));
}
APIUtilityVerifier EqualityVerifier
/**
 * Verifies that table properties passed through AcidOutputFormat.Options
 * (here orc.compress=SNAPPY) reach the underlying ORC writer, by dumping
 * the written delta with FileDump and checking the reported compression.
 *
 * Fix: the stdout redirection is now restored in a finally block — the
 * original code only reset System.out after the assertion, so a failure in
 * FileDump.main or in the contains() assertion leaked the redirected
 * stream into every subsequent test in the JVM.
 */
@Test public void testWriterTblProperties() throws Exception {
Path root=new Path(workDir,"testWriterTblProperties");
Configuration conf=new Configuration();
FileSystem fs=FileSystem.getLocal(conf).getRaw();
ObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(MyRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
// Table-level property that should override the writer's default compression.
Properties tblProps=new Properties();
tblProps.setProperty("orc.compress","SNAPPY");
AcidOutputFormat.Options options=new AcidOutputFormat.Options(conf).filesystem(fs).bucket(10).writingBase(false).minimumTransactionId(10).maximumTransactionId(19).inspector(inspector).reporter(Reporter.NULL).finalDestination(root).tableProperties(tblProps);
RecordUpdater updater=new OrcRecordUpdater(root,options);
updater.insert(11,new MyRow("first"));
updater.insert(11,new MyRow("second"));
updater.insert(11,new MyRow("third"));
updater.flush();
updater.insert(12,new MyRow("fourth"));
updater.insert(12,new MyRow("fifth"));
updater.flush();
// Capture FileDump's output; ALWAYS restore System.out, even on failure.
PrintStream origOut=System.out;
ByteArrayOutputStream myOut=new ByteArrayOutputStream();
System.setOut(new PrintStream(myOut));
try {
FileDump.main(new String[]{root.toUri().toString()});
System.out.flush();
} finally {
System.setOut(origOut);
}
String outDump=new String(myOut.toByteArray());
// The dump must report the table-property compression, not the default.
assertEquals(true,outDump.contains("Compression: SNAPPY"));
updater.close(false);
}
Class: org.apache.hadoop.hive.ql.io.orc.TestOrcSerDeStats APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes 1000 rows each containing a 10-entry map and checks that the
 * writer and reader agree on row count and raw data size (950000 bytes
 * total), both overall and for the "map1" column alone.
 */
@Test public void testOrcSerDeStatsMap() throws Exception {
ObjectInspector inspector;
synchronized (TestOrcSerDeStats.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(MapStruct.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
Writer writer=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).inspector(inspector).stripeSize(10000).bufferSize(10000));
// Each row holds a fresh map of ten "hi<i>" -> 2.0 entries.
for (int row=0; row < 1000; row++) {
Map test=new HashMap();
for (int i=0; i < 10; i++) {
test.put("hi" + i,2.0);
}
writer.addRow(new MapStruct(test));
}
writer.close();
assertEquals(1000,writer.getNumberOfRows());
assertEquals(950000,writer.getRawDataSize());
// The reader must report the same stats as the writer that produced the file.
Reader reader=OrcFile.createReader(testFilePath,OrcFile.readerOptions(conf).filesystem(fs));
assertEquals(1000,reader.getNumberOfRows());
assertEquals(950000,reader.getRawDataSize());
// The single map column accounts for the entire raw size.
assertEquals(950000,reader.getRawDataSizeOfColumns(Lists.newArrayList("map1")));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * End-to-end statistics check for a complex schema (BigRow: primitives,
 * binary, string, nested struct, list, map, timestamp, decimal). Two rows are
 * written; the test pins the total raw data size, per-column and multi-column
 * raw sizes, and the column statistics including their exact toString()
 * rendering.
 */
@Test public void testOrcSerDeStatsComplex() throws Exception {
ObjectInspector inspector;
// The reflection-based inspector factory caches globally; guard with the class lock.
synchronized (TestOrcSerDeStats.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(BigRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
Writer writer=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000).bufferSize(10000));
// Row 1 has an empty map; row 2 has an empty byte array, a 3-element list and
// a 2-entry map — together they exercise the empty and non-empty size paths.
writer.addRow(new BigRow(false,(byte)1,(short)1024,65536,Long.MAX_VALUE,(float)1.0,-15.0,bytes(0,1,2,3,4),"hi",new MiddleStruct(inner(1,"bye"),inner(2,"sigh")),list(inner(3,"good"),inner(4,"bad")),map(),Timestamp.valueOf("2000-03-12 15:00:00"),HiveDecimal.create("12345678.6547456")));
writer.addRow(new BigRow(true,(byte)100,(short)2048,65536,Long.MAX_VALUE,(float)2.0,-5.0,bytes(),"bye",new MiddleStruct(inner(1,"bye"),inner(2,"sigh")),list(inner(100000000,"cat"),inner(-100000,"in"),inner(1234,"hat")),map(inner(5,"chani"),inner(1,"mauddib")),Timestamp.valueOf("2000-03-11 15:00:00"),HiveDecimal.create("12345678.6547452")));
writer.close();
long rowCount=writer.getNumberOfRows();
long rawDataSize=writer.getRawDataSize();
assertEquals(2,rowCount);
assertEquals(1740,rawDataSize);
// The reader must agree with the writer's row count and raw size.
Reader reader=OrcFile.createReader(testFilePath,OrcFile.readerOptions(conf).filesystem(fs));
assertEquals(2,reader.getNumberOfRows());
assertEquals(1740,reader.getRawDataSize());
// Per-column raw sizes; the final assertion verifies the complete column set
// sums back to the file's total raw data size.
assertEquals(8,reader.getRawDataSizeOfColumns(Lists.newArrayList("boolean1")));
assertEquals(8,reader.getRawDataSizeOfColumns(Lists.newArrayList("byte1")));
assertEquals(8,reader.getRawDataSizeOfColumns(Lists.newArrayList("short1")));
assertEquals(8,reader.getRawDataSizeOfColumns(Lists.newArrayList("int1")));
assertEquals(16,reader.getRawDataSizeOfColumns(Lists.newArrayList("long1")));
assertEquals(8,reader.getRawDataSizeOfColumns(Lists.newArrayList("float1")));
assertEquals(16,reader.getRawDataSizeOfColumns(Lists.newArrayList("double1")));
assertEquals(5,reader.getRawDataSizeOfColumns(Lists.newArrayList("bytes1")));
assertEquals(172,reader.getRawDataSizeOfColumns(Lists.newArrayList("string1")));
assertEquals(455,reader.getRawDataSizeOfColumns(Lists.newArrayList("list")));
assertEquals(368,reader.getRawDataSizeOfColumns(Lists.newArrayList("map")));
assertEquals(364,reader.getRawDataSizeOfColumns(Lists.newArrayList("middle")));
assertEquals(80,reader.getRawDataSizeOfColumns(Lists.newArrayList("ts")));
assertEquals(224,reader.getRawDataSizeOfColumns(Lists.newArrayList("decimal1")));
assertEquals(88,reader.getRawDataSizeOfColumns(Lists.newArrayList("ts","int1")));
assertEquals(1195,reader.getRawDataSizeOfColumns(Lists.newArrayList("middle","list","map","float1")));
assertEquals(185,reader.getRawDataSizeOfColumns(Lists.newArrayList("bytes1","byte1","string1")));
assertEquals(rawDataSize,reader.getRawDataSizeOfColumns(Lists.newArrayList("boolean1","byte1","short1","int1","long1","float1","double1","bytes1","string1","list","map","middle","ts","decimal1")));
// Column statistics are indexed by flattened column id (0 is the root struct).
ColumnStatistics[] stats=reader.getStatistics();
assertEquals(2,stats[1].getNumberOfValues());
assertEquals(1,((BooleanColumnStatistics)stats[1]).getFalseCount());
assertEquals(1,((BooleanColumnStatistics)stats[1]).getTrueCount());
assertEquals("count: 2 hasNull: false true: 1",stats[1].toString());
assertEquals(2048,((IntegerColumnStatistics)stats[3]).getMaximum());
assertEquals(1024,((IntegerColumnStatistics)stats[3]).getMinimum());
assertEquals(true,((IntegerColumnStatistics)stats[3]).isSumDefined());
assertEquals(3072,((IntegerColumnStatistics)stats[3]).getSum());
assertEquals("count: 2 hasNull: false min: 1024 max: 2048 sum: 3072",stats[3].toString());
// Column 5 (long1) holds Long.MAX_VALUE twice, so the sum overflows and must
// be reported as undefined.
assertEquals(Long.MAX_VALUE,((IntegerColumnStatistics)stats[5]).getMaximum());
assertEquals(Long.MAX_VALUE,((IntegerColumnStatistics)stats[5]).getMinimum());
assertEquals(false,((IntegerColumnStatistics)stats[5]).isSumDefined());
assertEquals("count: 2 hasNull: false min: 9223372036854775807 max: 9223372036854775807",stats[5].toString());
assertEquals(-15.0,((DoubleColumnStatistics)stats[7]).getMinimum());
assertEquals(-5.0,((DoubleColumnStatistics)stats[7]).getMaximum());
assertEquals(-20.0,((DoubleColumnStatistics)stats[7]).getSum(),0.00001);
assertEquals("count: 2 hasNull: false min: -15.0 max: -5.0 sum: -20.0",stats[7].toString());
// String column: sum is the total character count ("hi" + "bye" = 5).
assertEquals("count: 2 hasNull: false min: bye max: hi sum: 5",stats[9].toString());
}
APIUtilityVerifier BranchVerifier InternalCallVerifier EqualityVerifier ExceptionVerifier HybridVerifier
/**
 * Reads the checked-in ORC 0.11 format file and verifies stripe layout
 * (contiguity of offsets), raw data size, and column statistics.
 *
 * NOTE(review): the test is annotated with expected=ClassCastException, so it
 * only passes once one of the ColumnStatistics casts below throws — presumably
 * one of the later stats[...] casts is invalid for the 0.11 format; confirm
 * which cast is the intended failure point before refactoring.
 */
@Test(expected=ClassCastException.class) public void testSerdeStatsOldFormat() throws Exception {
Path oldFilePath=new Path(HiveTestUtils.getFileFromClasspath("orc-file-11-format.orc"));
Reader reader=OrcFile.createReader(oldFilePath,OrcFile.readerOptions(conf).filesystem(fs));
int stripeCount=0;
int rowCount=0;
long currentOffset=-1;
// Stripes must be laid out back-to-back: each stripe starts exactly where the
// previous one (offset + index + data + footer) ended.
for ( StripeInformation stripe : reader.getStripes()) {
stripeCount+=1;
rowCount+=stripe.getNumberOfRows();
if (currentOffset < 0) {
currentOffset=stripe.getOffset() + stripe.getIndexLength() + stripe.getDataLength()+ stripe.getFooterLength();
}
else {
assertEquals(currentOffset,stripe.getOffset());
currentOffset+=stripe.getIndexLength() + stripe.getDataLength() + stripe.getFooterLength();
}
}
// Sum of per-stripe row counts must match the file-level row count.
assertEquals(reader.getNumberOfRows(),rowCount);
assertEquals(6300000,reader.getRawDataSize());
assertEquals(2,stripeCount);
// Column statistics are indexed by flattened column id (0 is the root struct).
ColumnStatistics[] stats=reader.getStatistics();
assertEquals(7500,stats[1].getNumberOfValues());
assertEquals(3750,((BooleanColumnStatistics)stats[1]).getFalseCount());
assertEquals(3750,((BooleanColumnStatistics)stats[1]).getTrueCount());
assertEquals("count: 7500 hasNull: true true: 3750",stats[1].toString());
assertEquals(2048,((IntegerColumnStatistics)stats[3]).getMaximum());
assertEquals(1024,((IntegerColumnStatistics)stats[3]).getMinimum());
assertEquals(true,((IntegerColumnStatistics)stats[3]).isSumDefined());
assertEquals(11520000,((IntegerColumnStatistics)stats[3]).getSum());
assertEquals("count: 7500 hasNull: true min: 1024 max: 2048 sum: 11520000",stats[3].toString());
// Column 5 holds Long.MAX_VALUE, so the sum overflows and is undefined.
assertEquals(Long.MAX_VALUE,((IntegerColumnStatistics)stats[5]).getMaximum());
assertEquals(Long.MAX_VALUE,((IntegerColumnStatistics)stats[5]).getMinimum());
assertEquals(false,((IntegerColumnStatistics)stats[5]).isSumDefined());
assertEquals("count: 7500 hasNull: true min: 9223372036854775807 max: 9223372036854775807",stats[5].toString());
assertEquals(-15.0,((DoubleColumnStatistics)stats[7]).getMinimum());
assertEquals(-5.0,((DoubleColumnStatistics)stats[7]).getMaximum());
assertEquals(-75000.0,((DoubleColumnStatistics)stats[7]).getSum(),0.00001);
assertEquals("count: 7500 hasNull: true min: -15.0 max: -5.0 sum: -75000.0",stats[7].toString());
assertEquals("bye",((StringColumnStatistics)stats[9]).getMinimum());
assertEquals("hi",((StringColumnStatistics)stats[9]).getMaximum());
assertEquals(0,((StringColumnStatistics)stats[9]).getSum());
assertEquals("count: 7500 hasNull: true min: bye max: hi sum: 0",stats[9].toString());
assertEquals("count: 7500 hasNull: true",stats[8].toString());
assertEquals(5,((BinaryColumnStatistics)stats[8]).getSum());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
@Test public void testStringAndBinaryStatistics() throws Exception {
ObjectInspector inspector;
synchronized (TestOrcSerDeStats.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(SimpleStruct.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
Writer writer=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000).bufferSize(10000));
writer.addRow(new SimpleStruct(bytes(0,1,2,3,4),"foo"));
writer.addRow(new SimpleStruct(bytes(0,1,2,3),"bar"));
writer.addRow(new SimpleStruct(bytes(0,1,2,3,4,5),null));
writer.addRow(new SimpleStruct(null,"hi"));
writer.close();
assertEquals(4,writer.getNumberOfRows());
assertEquals(273,writer.getRawDataSize());
Reader reader=OrcFile.createReader(testFilePath,OrcFile.readerOptions(conf).filesystem(fs));
assertEquals(4,reader.getNumberOfRows());
assertEquals(273,reader.getRawDataSize());
assertEquals(15,reader.getRawDataSizeOfColumns(Lists.newArrayList("bytes1")));
assertEquals(258,reader.getRawDataSizeOfColumns(Lists.newArrayList("string1")));
assertEquals(273,reader.getRawDataSizeOfColumns(Lists.newArrayList("bytes1","string1")));
ColumnStatistics[] stats=reader.getStatistics();
assertEquals(4,stats[0].getNumberOfValues());
assertEquals("count: 4 hasNull: false",stats[0].toString());
assertEquals(3,stats[1].getNumberOfValues());
assertEquals(15,((BinaryColumnStatistics)stats[1]).getSum());
assertEquals("count: 3 hasNull: true sum: 15",stats[1].toString());
assertEquals(3,stats[2].getNumberOfValues());
assertEquals("bar",((StringColumnStatistics)stats[2]).getMinimum());
assertEquals("hi",((StringColumnStatistics)stats[2]).getMaximum());
assertEquals(8,((StringColumnStatistics)stats[2]).getSum());
assertEquals("count: 3 hasNull: true min: bar max: hi sum: 8",stats[2].toString());
StructObjectInspector readerInspector=(StructObjectInspector)reader.getObjectInspector();
assertEquals(ObjectInspector.Category.STRUCT,readerInspector.getCategory());
assertEquals("struct",readerInspector.getTypeName());
List extends StructField> fields=readerInspector.getAllStructFieldRefs();
BinaryObjectInspector bi=(BinaryObjectInspector)readerInspector.getStructFieldRef("bytes1").getFieldObjectInspector();
StringObjectInspector st=(StringObjectInspector)readerInspector.getStructFieldRef("string1").getFieldObjectInspector();
RecordReader rows=reader.rows();
Object row=rows.next(null);
assertNotNull(row);
assertEquals(bytes(0,1,2,3,4),bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,fields.get(0))));
assertEquals("foo",st.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(1))));
assertEquals(true,rows.hasNext());
row=rows.next(row);
assertEquals(bytes(0,1,2,3),bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,fields.get(0))));
assertEquals("bar",st.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(1))));
assertEquals(true,rows.hasNext());
row=rows.next(row);
assertEquals(bytes(0,1,2,3,4,5),bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,fields.get(0))));
assertNull(st.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(1))));
assertEquals(true,rows.hasNext());
row=rows.next(row);
assertNull(bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,fields.get(0))));
assertEquals("hi",st.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(1))));
assertEquals(false,rows.hasNext());
rows.close();
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Raw-data-size accounting when half of the rows are null: 1000 rows are
 * written, the odd-indexed ones as null, and the expected total and
 * per-column raw sizes are pinned on both the writer and the reader.
 */
@Test public void testOrcSerDeStatsSimpleWithNulls() throws Exception {
  ObjectInspector inspector;
  // The reflection-based inspector factory caches globally; guard with the class lock.
  synchronized (TestOrcSerDeStats.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        SimpleStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(10000).bufferSize(10000));
  for (int i = 0; i < 1000; ++i) {
    // Even rows carry a value; odd rows are written as null.
    SimpleStruct value = (i % 2 == 0)
        ? new SimpleStruct(new BytesWritable(new byte[]{1, 2, 3}), "hi")
        : null;
    writer.addRow(value);
  }
  writer.close();
  assertEquals(1000, writer.getNumberOfRows());
  assertEquals(44500, writer.getRawDataSize());
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  assertEquals(1000, reader.getNumberOfRows());
  assertEquals(44500, reader.getRawDataSize());
  assertEquals(1500, reader.getRawDataSizeOfColumns(Lists.newArrayList("bytes1")));
  assertEquals(43000, reader.getRawDataSizeOfColumns(Lists.newArrayList("string1")));
  assertEquals(44500, reader.getRawDataSizeOfColumns(Lists.newArrayList("bytes1", "string1")));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Row-count and raw-data-size statistics for a list column: 5000 rows, each a
 * ListStruct holding 1000 copies of "hi".
 */
@Test public void testOrcSerDeStatsList() throws Exception {
  ObjectInspector inspector;
  // The reflection-based inspector factory caches globally; guard with the class lock.
  synchronized (TestOrcSerDeStats.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        ListStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(10000).bufferSize(10000));
  for (int row = 0; row < 5000; row++) {
    // FIX: raw List/ArrayList had lost their type arguments (tag stripping);
    // restored from usage (String elements). Pre-sized to avoid regrowth.
    List<String> test = new ArrayList<String>(1000);
    for (int i = 0; i < 1000; i++) {
      test.add("hi");
    }
    writer.addRow(new ListStruct(test));
  }
  writer.close();
  assertEquals(5000, writer.getNumberOfRows());
  assertEquals(430000000, writer.getRawDataSize());
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  assertEquals(5000, reader.getNumberOfRows());
  assertEquals(430000000, reader.getRawDataSize());
  assertEquals(430000000, reader.getRawDataSizeOfColumns(Lists.newArrayList("list1")));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Same complex-schema statistics check as testOrcSerDeStatsComplex, but the
 * file is written with the legacy 0.11 writer version
 * (OrcFile.Version.V_0_11) to verify the old format reports identical sizes
 * and statistics.
 */
@Test public void testOrcSerDeStatsComplexOldFormat() throws Exception {
ObjectInspector inspector;
// The reflection-based inspector factory caches globally; guard with the class lock.
synchronized (TestOrcSerDeStats.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(BigRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
// version(V_0_11) is the only difference from the new-format test.
Writer writer=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000).version(OrcFile.Version.V_0_11).bufferSize(10000));
writer.addRow(new BigRow(false,(byte)1,(short)1024,65536,Long.MAX_VALUE,(float)1.0,-15.0,bytes(0,1,2,3,4),"hi",new MiddleStruct(inner(1,"bye"),inner(2,"sigh")),list(inner(3,"good"),inner(4,"bad")),map(),Timestamp.valueOf("2000-03-12 15:00:00"),HiveDecimal.create("12345678.6547456")));
writer.addRow(new BigRow(true,(byte)100,(short)2048,65536,Long.MAX_VALUE,(float)2.0,-5.0,bytes(),"bye",new MiddleStruct(inner(1,"bye"),inner(2,"sigh")),list(inner(100000000,"cat"),inner(-100000,"in"),inner(1234,"hat")),map(inner(5,"chani"),inner(1,"mauddib")),Timestamp.valueOf("2000-03-11 15:00:00"),HiveDecimal.create("12345678.6547452")));
writer.close();
long rowCount=writer.getNumberOfRows();
long rawDataSize=writer.getRawDataSize();
assertEquals(2,rowCount);
assertEquals(1740,rawDataSize);
Reader reader=OrcFile.createReader(testFilePath,OrcFile.readerOptions(conf).filesystem(fs));
assertEquals(2,reader.getNumberOfRows());
assertEquals(1740,reader.getRawDataSize());
// Per-column raw sizes; the final assertion checks the full column set sums
// back to the total raw data size.
assertEquals(8,reader.getRawDataSizeOfColumns(Lists.newArrayList("boolean1")));
assertEquals(8,reader.getRawDataSizeOfColumns(Lists.newArrayList("byte1")));
assertEquals(8,reader.getRawDataSizeOfColumns(Lists.newArrayList("short1")));
assertEquals(8,reader.getRawDataSizeOfColumns(Lists.newArrayList("int1")));
assertEquals(16,reader.getRawDataSizeOfColumns(Lists.newArrayList("long1")));
assertEquals(8,reader.getRawDataSizeOfColumns(Lists.newArrayList("float1")));
assertEquals(16,reader.getRawDataSizeOfColumns(Lists.newArrayList("double1")));
assertEquals(5,reader.getRawDataSizeOfColumns(Lists.newArrayList("bytes1")));
assertEquals(172,reader.getRawDataSizeOfColumns(Lists.newArrayList("string1")));
assertEquals(455,reader.getRawDataSizeOfColumns(Lists.newArrayList("list")));
assertEquals(368,reader.getRawDataSizeOfColumns(Lists.newArrayList("map")));
assertEquals(364,reader.getRawDataSizeOfColumns(Lists.newArrayList("middle")));
assertEquals(80,reader.getRawDataSizeOfColumns(Lists.newArrayList("ts")));
assertEquals(224,reader.getRawDataSizeOfColumns(Lists.newArrayList("decimal1")));
assertEquals(88,reader.getRawDataSizeOfColumns(Lists.newArrayList("ts","int1")));
assertEquals(1195,reader.getRawDataSizeOfColumns(Lists.newArrayList("middle","list","map","float1")));
assertEquals(185,reader.getRawDataSizeOfColumns(Lists.newArrayList("bytes1","byte1","string1")));
assertEquals(rawDataSize,reader.getRawDataSizeOfColumns(Lists.newArrayList("boolean1","byte1","short1","int1","long1","float1","double1","bytes1","string1","list","map","middle","ts","decimal1")));
// Column statistics are indexed by flattened column id (0 is the root struct).
ColumnStatistics[] stats=reader.getStatistics();
assertEquals(2,stats[1].getNumberOfValues());
assertEquals(1,((BooleanColumnStatistics)stats[1]).getFalseCount());
assertEquals(1,((BooleanColumnStatistics)stats[1]).getTrueCount());
assertEquals("count: 2 hasNull: false true: 1",stats[1].toString());
assertEquals(2048,((IntegerColumnStatistics)stats[3]).getMaximum());
assertEquals(1024,((IntegerColumnStatistics)stats[3]).getMinimum());
assertEquals(true,((IntegerColumnStatistics)stats[3]).isSumDefined());
assertEquals(3072,((IntegerColumnStatistics)stats[3]).getSum());
assertEquals("count: 2 hasNull: false min: 1024 max: 2048 sum: 3072",stats[3].toString());
// Column 5 (long1) holds Long.MAX_VALUE twice, so the sum overflows and is undefined.
assertEquals(Long.MAX_VALUE,((IntegerColumnStatistics)stats[5]).getMaximum());
assertEquals(Long.MAX_VALUE,((IntegerColumnStatistics)stats[5]).getMinimum());
assertEquals(false,((IntegerColumnStatistics)stats[5]).isSumDefined());
assertEquals("count: 2 hasNull: false min: 9223372036854775807 max: 9223372036854775807",stats[5].toString());
assertEquals(-15.0,((DoubleColumnStatistics)stats[7]).getMinimum());
assertEquals(-5.0,((DoubleColumnStatistics)stats[7]).getMaximum());
assertEquals(-20.0,((DoubleColumnStatistics)stats[7]).getSum(),0.00001);
assertEquals("count: 2 hasNull: false min: -15.0 max: -5.0 sum: -20.0",stats[7].toString());
// Binary sum is total byte count; string sum is total character count.
assertEquals(5,((BinaryColumnStatistics)stats[8]).getSum());
assertEquals("count: 2 hasNull: false sum: 5",stats[8].toString());
assertEquals("bye",((StringColumnStatistics)stats[9]).getMinimum());
assertEquals("hi",((StringColumnStatistics)stats[9]).getMaximum());
assertEquals(5,((StringColumnStatistics)stats[9]).getSum());
assertEquals("count: 2 hasNull: false min: bye max: hi sum: 5",stats[9].toString());
}
Class: org.apache.hadoop.hive.ql.io.orc.TestOrcSplitElimination InternalCallVerifier EqualityVerifier
@Test public void testSplitEliminationLargeMaxSplit() throws Exception {
ObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(AllTypesRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
Writer writer=OrcFile.createWriter(fs,testFilePath,conf,inspector,100000,CompressionKind.NONE,10000,10000);
writeData(writer);
writer.close();
HiveConf.setLongVar(conf,HiveConf.ConfVars.MAPREDMINSPLITSIZE,1000);
HiveConf.setLongVar(conf,HiveConf.ConfVars.MAPREDMAXSPLITSIZE,150000);
InputFormat,?> in=new OrcInputFormat();
FileInputFormat.setInputPaths(conf,testFilePath.toString());
GenericUDF udf=new GenericUDFOPEqualOrLessThan();
List childExpr=Lists.newArrayList();
ExprNodeColumnDesc col=new ExprNodeColumnDesc(Long.class,"userid","T",false);
ExprNodeConstantDesc con=new ExprNodeConstantDesc(100);
childExpr.add(col);
childExpr.add(con);
ExprNodeGenericFuncDesc en=new ExprNodeGenericFuncDesc(inspector,udf,childExpr);
String sargStr=SerializationUtilities.serializeExpression(en);
conf.set("hive.io.filter.expr.serialized",sargStr);
InputSplit[] splits=in.getSplits(conf,1);
assertEquals(2,splits.length);
con=new ExprNodeConstantDesc(0);
childExpr.set(1,con);
en=new ExprNodeGenericFuncDesc(inspector,udf,childExpr);
sargStr=SerializationUtilities.serializeExpression(en);
conf.set("hive.io.filter.expr.serialized",sargStr);
splits=in.getSplits(conf,1);
assertEquals(0,splits.length);
con=new ExprNodeConstantDesc(2);
childExpr.set(1,con);
en=new ExprNodeGenericFuncDesc(inspector,udf,childExpr);
sargStr=SerializationUtilities.serializeExpression(en);
conf.set("hive.io.filter.expr.serialized",sargStr);
splits=in.getSplits(conf,1);
assertEquals(1,splits.length);
con=new ExprNodeConstantDesc(5);
childExpr.set(1,con);
en=new ExprNodeGenericFuncDesc(inspector,udf,childExpr);
sargStr=SerializationUtilities.serializeExpression(en);
conf.set("hive.io.filter.expr.serialized",sargStr);
splits=in.getSplits(conf,1);
assertEquals(2,splits.length);
con=new ExprNodeConstantDesc(13);
childExpr.set(1,con);
en=new ExprNodeGenericFuncDesc(inspector,udf,childExpr);
sargStr=SerializationUtilities.serializeExpression(en);
conf.set("hive.io.filter.expr.serialized",sargStr);
splits=in.getSplits(conf,1);
assertEquals(2,splits.length);
con=new ExprNodeConstantDesc(29);
childExpr.set(1,con);
en=new ExprNodeGenericFuncDesc(inspector,udf,childExpr);
sargStr=SerializationUtilities.serializeExpression(en);
conf.set("hive.io.filter.expr.serialized",sargStr);
splits=in.getSplits(conf,1);
assertEquals(2,splits.length);
con=new ExprNodeConstantDesc(70);
childExpr.set(1,con);
en=new ExprNodeGenericFuncDesc(inspector,udf,childExpr);
sargStr=SerializationUtilities.serializeExpression(en);
conf.set("hive.io.filter.expr.serialized",sargStr);
splits=in.getSplits(conf,1);
assertEquals(2,splits.length);
}
InternalCallVerifier EqualityVerifier
@Test public void testSplitEliminationSmallMaxSplit() throws Exception {
ObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(AllTypesRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
Writer writer=OrcFile.createWriter(fs,testFilePath,conf,inspector,100000,CompressionKind.NONE,10000,10000);
writeData(writer);
writer.close();
HiveConf.setLongVar(conf,HiveConf.ConfVars.MAPREDMINSPLITSIZE,1000);
HiveConf.setLongVar(conf,HiveConf.ConfVars.MAPREDMAXSPLITSIZE,5000);
InputFormat,?> in=new OrcInputFormat();
FileInputFormat.setInputPaths(conf,testFilePath.toString());
GenericUDF udf=new GenericUDFOPEqualOrLessThan();
List childExpr=Lists.newArrayList();
ExprNodeColumnDesc col=new ExprNodeColumnDesc(Long.class,"userid","T",false);
ExprNodeConstantDesc con=new ExprNodeConstantDesc(100);
childExpr.add(col);
childExpr.add(con);
ExprNodeGenericFuncDesc en=new ExprNodeGenericFuncDesc(inspector,udf,childExpr);
String sargStr=SerializationUtilities.serializeExpression(en);
conf.set("hive.io.filter.expr.serialized",sargStr);
InputSplit[] splits=in.getSplits(conf,1);
assertEquals(5,splits.length);
con=new ExprNodeConstantDesc(1);
childExpr.set(1,con);
en=new ExprNodeGenericFuncDesc(inspector,udf,childExpr);
sargStr=SerializationUtilities.serializeExpression(en);
conf.set("hive.io.filter.expr.serialized",sargStr);
splits=in.getSplits(conf,1);
assertEquals(0,splits.length);
con=new ExprNodeConstantDesc(2);
childExpr.set(1,con);
en=new ExprNodeGenericFuncDesc(inspector,udf,childExpr);
sargStr=SerializationUtilities.serializeExpression(en);
conf.set("hive.io.filter.expr.serialized",sargStr);
splits=in.getSplits(conf,1);
assertEquals(1,splits.length);
con=new ExprNodeConstantDesc(5);
childExpr.set(1,con);
en=new ExprNodeGenericFuncDesc(inspector,udf,childExpr);
sargStr=SerializationUtilities.serializeExpression(en);
conf.set("hive.io.filter.expr.serialized",sargStr);
splits=in.getSplits(conf,1);
assertEquals(2,splits.length);
con=new ExprNodeConstantDesc(13);
childExpr.set(1,con);
en=new ExprNodeGenericFuncDesc(inspector,udf,childExpr);
sargStr=SerializationUtilities.serializeExpression(en);
conf.set("hive.io.filter.expr.serialized",sargStr);
splits=in.getSplits(conf,1);
assertEquals(3,splits.length);
con=new ExprNodeConstantDesc(29);
childExpr.set(1,con);
en=new ExprNodeGenericFuncDesc(inspector,udf,childExpr);
sargStr=SerializationUtilities.serializeExpression(en);
conf.set("hive.io.filter.expr.serialized",sargStr);
splits=in.getSplits(conf,1);
assertEquals(4,splits.length);
con=new ExprNodeConstantDesc(70);
childExpr.set(1,con);
en=new ExprNodeGenericFuncDesc(inspector,udf,childExpr);
sargStr=SerializationUtilities.serializeExpression(en);
conf.set("hive.io.filter.expr.serialized",sargStr);
splits=in.getSplits(conf,1);
assertEquals(5,splits.length);
}
InternalCallVerifier EqualityVerifier
@Test public void testSplitEliminationComplexExpr() throws Exception {
ObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector=ObjectInspectorFactory.getReflectionObjectInspector(AllTypesRow.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
Writer writer=OrcFile.createWriter(fs,testFilePath,conf,inspector,100000,CompressionKind.NONE,10000,10000);
writeData(writer);
writer.close();
HiveConf.setLongVar(conf,HiveConf.ConfVars.MAPREDMINSPLITSIZE,1000);
HiveConf.setLongVar(conf,HiveConf.ConfVars.MAPREDMAXSPLITSIZE,150000);
InputFormat,?> in=new OrcInputFormat();
FileInputFormat.setInputPaths(conf,testFilePath.toString());
GenericUDF udf=new GenericUDFOPEqualOrLessThan();
List childExpr=Lists.newArrayList();
ExprNodeColumnDesc col=new ExprNodeColumnDesc(Long.class,"userid","T",false);
ExprNodeConstantDesc con=new ExprNodeConstantDesc(100);
childExpr.add(col);
childExpr.add(con);
ExprNodeGenericFuncDesc en=new ExprNodeGenericFuncDesc(inspector,udf,childExpr);
GenericUDF udf1=new GenericUDFOPEqualOrLessThan();
List childExpr1=Lists.newArrayList();
ExprNodeColumnDesc col1=new ExprNodeColumnDesc(Double.class,"subtype","T",false);
ExprNodeConstantDesc con1=new ExprNodeConstantDesc(1000.0);
childExpr1.add(col1);
childExpr1.add(con1);
ExprNodeGenericFuncDesc en1=new ExprNodeGenericFuncDesc(inspector,udf1,childExpr1);
GenericUDF udf2=new GenericUDFOPAnd();
List childExpr2=Lists.newArrayList();
childExpr2.add(en);
childExpr2.add(en1);
ExprNodeGenericFuncDesc en2=new ExprNodeGenericFuncDesc(inspector,udf2,childExpr2);
String sargStr=SerializationUtilities.serializeExpression(en2);
conf.set("hive.io.filter.expr.serialized",sargStr);
InputSplit[] splits=in.getSplits(conf,1);
assertEquals(2,splits.length);
con=new ExprNodeConstantDesc(2);
childExpr.set(1,con);
en=new ExprNodeGenericFuncDesc(inspector,udf,childExpr);
con1=new ExprNodeConstantDesc(0.0);
childExpr1.set(1,con1);
en1=new ExprNodeGenericFuncDesc(inspector,udf1,childExpr1);
childExpr2.set(0,en);
childExpr2.set(1,en1);
en2=new ExprNodeGenericFuncDesc(inspector,udf2,childExpr2);
sargStr=SerializationUtilities.serializeExpression(en2);
conf.set("hive.io.filter.expr.serialized",sargStr);
splits=in.getSplits(conf,1);
assertEquals(0,splits.length);
con=new ExprNodeConstantDesc(2);
childExpr.set(1,con);
en=new ExprNodeGenericFuncDesc(inspector,udf,childExpr);
con1=new ExprNodeConstantDesc(1.0);
childExpr1.set(1,con1);
en1=new ExprNodeGenericFuncDesc(inspector,udf1,childExpr1);
childExpr2.set(0,en);
childExpr2.set(1,en1);
en2=new ExprNodeGenericFuncDesc(inspector,udf2,childExpr2);
sargStr=SerializationUtilities.serializeExpression(en2);
conf.set("hive.io.filter.expr.serialized",sargStr);
splits=in.getSplits(conf,1);
assertEquals(1,splits.length);
udf=new GenericUDFOPEqual();
con=new ExprNodeConstantDesc(13);
childExpr.set(1,con);
en=new ExprNodeGenericFuncDesc(inspector,udf,childExpr);
con1=new ExprNodeConstantDesc(80.0);
childExpr1.set(1,con1);
en1=new ExprNodeGenericFuncDesc(inspector,udf1,childExpr1);
childExpr2.set(0,en);
childExpr2.set(1,en1);
en2=new ExprNodeGenericFuncDesc(inspector,udf2,childExpr2);
sargStr=SerializationUtilities.serializeExpression(en2);
conf.set("hive.io.filter.expr.serialized",sargStr);
splits=in.getSplits(conf,1);
assertEquals(2,splits.length);
udf=new GenericUDFOPEqual();
con=new ExprNodeConstantDesc(13);
childExpr.set(1,con);
en=new ExprNodeGenericFuncDesc(inspector,udf,childExpr);
udf1=new GenericUDFOPEqual();
con1=new ExprNodeConstantDesc(80.0);
childExpr1.set(1,con1);
en1=new ExprNodeGenericFuncDesc(inspector,udf1,childExpr1);
childExpr2.set(0,en);
childExpr2.set(1,en1);
en2=new ExprNodeGenericFuncDesc(inspector,udf2,childExpr2);
sargStr=SerializationUtilities.serializeExpression(en2);
conf.set("hive.io.filter.expr.serialized",sargStr);
splits=in.getSplits(conf,1);
assertEquals(1,splits.length);
}
Class: org.apache.hadoop.hive.ql.io.orc.TestOrcStruct InternalCallVerifier EqualityVerifier
/**
 * Exercises OrcUnion equality semantics: equal tag and value, null values,
 * differing tags, differing values, toString() rendering, and comparison
 * against null.
 */
@Test public void testUnion() throws Exception {
  OrcUnion left = new OrcUnion();
  OrcUnion right = new OrcUnion();
  // Same tag, same value: equal with equal hash codes.
  left.set((byte) 0, "hi");
  right.set((byte) 0, "hi");
  assertEquals(left, right);
  assertEquals(left.hashCode(), right.hashCode());
  // A null value on one side breaks equality in both directions.
  right.set((byte) 0, null);
  assertEquals(false, left.equals(right));
  assertEquals(false, right.equals(left));
  // Both null: equal again.
  left.set((byte) 0, null);
  assertEquals(left, right);
  // Same value under different tags: never equal.
  right.set((byte) 0, "hi");
  left.set((byte) 1, "hi");
  assertEquals(false, left.equals(right));
  assertEquals(false, left.hashCode() == right.hashCode());
  // Same tag, different values.
  right.set((byte) 1, "byte");
  assertEquals(false, left.equals(right));
  assertEquals("union(1, hi)", left.toString());
  assertEquals(false, left.equals(null));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Builds an OrcStruct object inspector from a type string and exercises the
 * struct, list and map inspectors it exposes (field lookup, data-as-list,
 * list element/length access, map size/value access).
 */
@Test public void testInspectorFromTypeInfo() throws Exception {
  // FIX: every angle-bracketed section of the type string (and of the expected
  // type name below) had been stripped by an earlier tool; restored the
  // 13-column schema implied by the rest of the test (13 fields, field 10 the
  // map, field 12 the array).
  TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString("struct<c1:boolean,c2:tinyint"
      + ",c3:smallint,c4:int,c5:bigint,c6:float,c7:double,c8:binary,"
      + "c9:string,c10:struct<c1:int>,c11:map<int,int>,c12:uniontype<int>"
      + ",c13:array<timestamp>>");
  StructObjectInspector inspector =
      (StructObjectInspector) OrcStruct.createObjectInspector(typeInfo);
  assertEquals("struct<c1:boolean,c2:tinyint,c3:smallint,c4:int,c5:bigint,c6:float,"
      + "c7:double,c8:binary,c9:string,c10:struct<c1:int>,c11:map<int,int>,"
      + "c12:uniontype<int>,c13:array<timestamp>>", inspector.getTypeName());
  assertEquals(null, inspector.getAllStructFieldRefs().get(0).getFieldComment());
  // Unknown field names resolve to null rather than throwing.
  assertEquals(null, inspector.getStructFieldRef("UNKNOWN"));
  OrcStruct s1 = new OrcStruct(13);
  for (int i = 0; i < 13; ++i) {
    s1.setFieldValue(i, i);
  }
  // FIX: raw List/ArrayList restored to List<Object> (holds boxed ints).
  List<Object> list = new ArrayList<Object>();
  list.addAll(Arrays.asList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12));
  assertEquals(list, inspector.getStructFieldsDataAsList(s1));
  // Field 12 (c13) is the array<timestamp> column.
  ListObjectInspector listOI = (ListObjectInspector)
      inspector.getAllStructFieldRefs().get(12).getFieldObjectInspector();
  assertEquals(ObjectInspector.Category.LIST, listOI.getCategory());
  assertEquals(10, listOI.getListElement(list, 10));
  // Out-of-range indexes yield null rather than throwing.
  assertEquals(null, listOI.getListElement(list, -1));
  assertEquals(null, listOI.getListElement(list, 13));
  assertEquals(13, listOI.getListLength(list));
  // FIX: raw Map/HashMap restored to Map<Integer, Integer>.
  Map<Integer, Integer> map = new HashMap<Integer, Integer>();
  map.put(1, 2);
  map.put(2, 4);
  map.put(3, 6);
  // Field 10 (c11) is the map<int,int> column.
  MapObjectInspector mapOI = (MapObjectInspector)
      inspector.getAllStructFieldRefs().get(10).getFieldObjectInspector();
  assertEquals(3, mapOI.getMapSize(map));
  assertEquals(4, mapOI.getMapValueElement(map, 2));
}
InternalCallVerifier EqualityVerifier
/**
 * Exercises OrcStruct equality, hashCode and toString: equal structs, a
 * shorter struct with the same prefix of values, pinned hash codes, and
 * equality after nulling a field on one then both sides.
 */
@Test public void testStruct() throws Exception {
  OrcStruct full = new OrcStruct(4);
  OrcStruct twin = new OrcStruct(4);
  OrcStruct shorter = new OrcStruct(3);
  Object[] values = {"hop", "on", "pop", 42};
  for (int i = 0; i < 4; ++i) {
    full.setFieldValue(i, values[i]);
  }
  assertEquals(false, full.equals(null));
  for (int i = 0; i < 4; ++i) {
    twin.setFieldValue(i, values[i]);
  }
  assertEquals(full, twin);
  // Same leading values but fewer fields: not equal.
  for (int i = 0; i < 3; ++i) {
    shorter.setFieldValue(i, values[i]);
  }
  assertEquals(false, full.equals(shorter));
  // Hash codes are pinned to catch accidental changes to the algorithm.
  assertEquals(11241, full.hashCode());
  assertEquals(full.hashCode(), twin.hashCode());
  assertEquals(11204, shorter.hashCode());
  assertEquals("{hop, on, pop, 42}", full.toString());
  // Null in one field breaks equality both ways; null in both restores it.
  full.setFieldValue(3, null);
  assertEquals(false, full.equals(twin));
  assertEquals(false, twin.equals(full));
  twin.setFieldValue(3, null);
  assertEquals(full, twin);
}
Class: org.apache.hadoop.hive.ql.io.orc.TestOrcTimezone1 APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes a set of timestamps under the writer's time zone, reopens the file
 * under the reader's time zone, and checks that every value round-trips to
 * its exact original string representation.
 */
@Test public void testTimestampWriter() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Timestamp.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  TimeZone.setDefault(TimeZone.getTimeZone(writerTimeZone));
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000).bufferSize(10000));
  assertEquals(writerTimeZone, TimeZone.getDefault().getID());
  // FIX: raw "List ts" had lost its type argument (tag stripping); restored.
  // The values cover varying fractional-second precision, including sub-epoch
  // nanosecond tails and bare ".0".
  List<String> ts = Lists.newArrayList();
  ts.add("2003-01-01 01:00:00.000000222");
  ts.add("1996-08-02 09:00:00.723100809");
  ts.add("1999-01-01 02:00:00.999999999");
  ts.add("1995-01-02 03:00:00.688888888");
  ts.add("2002-01-01 04:00:00.1");
  ts.add("2010-03-02 05:00:00.000009001");
  ts.add("2005-01-01 06:00:00.000002229");
  ts.add("2006-01-01 07:00:00.900203003");
  ts.add("2003-01-01 08:00:00.800000007");
  ts.add("1998-11-02 10:00:00.857340643");
  ts.add("2008-10-02 11:00:00.0");
  ts.add("2037-01-01 00:00:00.000999");
  ts.add("2014-03-28 00:00:00.0");
  for (String t : ts) {
    writer.addRow(Timestamp.valueOf(t));
  }
  writer.close();
  // Switch the default zone before reading to prove the stored values are
  // zone-adjusted correctly.
  TimeZone.setDefault(TimeZone.getTimeZone(readerTimeZone));
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  assertEquals(readerTimeZone, TimeZone.getDefault().getID());
  RecordReader rows = reader.rows(null);
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    Timestamp got = ((TimestampWritable) row).getTimestamp();
    assertEquals(ts.get(idx++), got.toString());
  }
  rows.close();
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
@Test public void testReadTimestampFormat_0_11() throws Exception {
TimeZone.setDefault(TimeZone.getTimeZone(readerTimeZone));
Path oldFilePath=new Path(HiveTestUtils.getFileFromClasspath("orc-file-11-format.orc"));
Reader reader=OrcFile.createReader(oldFilePath,OrcFile.readerOptions(conf).filesystem(fs));
StructObjectInspector readerInspector=(StructObjectInspector)reader.getObjectInspector();
List extends StructField> fields=readerInspector.getAllStructFieldRefs();
TimestampObjectInspector tso=(TimestampObjectInspector)readerInspector.getStructFieldRef("ts").getFieldObjectInspector();
RecordReader rows=reader.rows();
Object row=rows.next(null);
assertNotNull(row);
assertEquals(Timestamp.valueOf("2000-03-12 15:00:00"),tso.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(12))));
assertEquals(true,rows.hasNext());
rows.seekToRow(7499);
row=rows.next(null);
assertEquals(Timestamp.valueOf("2000-03-12 15:00:01"),tso.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(12))));
assertEquals(false,rows.hasNext());
rows.close();
}
Class: org.apache.hadoop.hive.ql.io.orc.TestOrcTimezone2 APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Same round-trip as TestOrcTimezone1: write timestamps under the writer
 * time zone, read them back under the reader time zone, and require the
 * string form of every value to match exactly.
 * Fixed the raw {@code List} declaration to {@code List<String>}.
 */
@Test public void testTimestampWriter() throws Exception {
  ObjectInspector inspector;
  // TestOrcFile.class guards shared ObjectInspector factory state across tests.
  synchronized (TestOrcFile.class) {
    inspector=ObjectInspectorFactory.getReflectionObjectInspector(Timestamp.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  TimeZone.setDefault(TimeZone.getTimeZone(writerTimeZone));
  Writer writer=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000).bufferSize(10000));
  assertEquals(writerTimeZone,TimeZone.getDefault().getID());
  List<String> ts=Lists.newArrayList();
  ts.add("2003-01-01 01:00:00.000000222");
  ts.add("1999-01-01 02:00:00.999999999");
  ts.add("1995-01-02 03:00:00.688888888");
  ts.add("2002-01-01 04:00:00.1");
  ts.add("2010-03-02 05:00:00.000009001");
  ts.add("2005-01-01 06:00:00.000002229");
  ts.add("2006-01-01 07:00:00.900203003");
  ts.add("2003-01-01 08:00:00.800000007");
  ts.add("1996-08-02 09:00:00.723100809");
  ts.add("1998-11-02 10:00:00.857340643");
  ts.add("2008-10-02 11:00:00.0");
  ts.add("2037-01-01 00:00:00.000999");
  for (String t : ts) {
    writer.addRow(Timestamp.valueOf(t));
  }
  writer.close();
  // Switch the default zone before reading to prove values are zone-stable.
  TimeZone.setDefault(TimeZone.getTimeZone(readerTimeZone));
  Reader reader=OrcFile.createReader(testFilePath,OrcFile.readerOptions(conf).filesystem(fs));
  assertEquals(readerTimeZone,TimeZone.getDefault().getID());
  RecordReader rows=reader.rows(null);
  int idx=0;
  while (rows.hasNext()) {
    Object row=rows.next(null);
    Timestamp got=((TimestampWritable) row).getTimestamp();
    assertEquals(ts.get(idx++),got.toString());
  }
  rows.close();
}
Class: org.apache.hadoop.hive.ql.io.orc.TestOrcWideTable EqualityVerifier
/** 50 columns in a 256MB stripe: the estimator keeps the full 256KB buffer. */
@Test public void testBufferSizeFor50Col() throws IOException {
  final int kb = 1024;
  assertEquals(256 * kb,
      WriterImpl.getEstimatedBufferSize(256 * kb * kb, 50, 256 * kb));
}
EqualityVerifier
/** 2000 columns in a 512MB stripe: buffer shrinks from 256KB to 16KB. */
@Test public void testBufferSizeFor2000Col() throws IOException {
  final int kb = 1024;
  assertEquals(16 * kb,
      WriterImpl.getEstimatedBufferSize(512 * kb * kb, 2000, 256 * kb));
}
EqualityVerifier
/** 4000 columns in a 512MB stripe: buffer shrinks from 256KB to 8KB. */
@Test public void testBufferSizeFor4000Col() throws IOException {
  final int kb = 1024;
  assertEquals(8 * kb,
      WriterImpl.getEstimatedBufferSize(512 * kb * kb, 4000, 256 * kb));
}
EqualityVerifier
/** 25000 columns in a 512MB stripe: buffer bottoms out at 4KB. */
@Test public void testBufferSizeFor25000Col() throws IOException {
  final int kb = 1024;
  assertEquals(4 * kb,
      WriterImpl.getEstimatedBufferSize(512 * kb * kb, 25000, 256 * kb));
}
EqualityVerifier
/** 1000 columns in a 512MB stripe with a 128KB request: estimator picks 32KB. */
@Test public void testBufferSizeFor1000Col() throws IOException {
  final int kb = 1024;
  assertEquals(32 * kb,
      WriterImpl.getEstimatedBufferSize(512 * kb * kb, 1000, 128 * kb));
}
EqualityVerifier
/** A single column never shrinks the requested 128KB buffer. */
@Test public void testBufferSizeFor1Col() throws IOException {
  final int kb = 1024;
  assertEquals(128 * kb,
      WriterImpl.getEstimatedBufferSize(512 * kb * kb, 1, 128 * kb));
}
Class: org.apache.hadoop.hive.ql.io.orc.TestRLEv2 APIUtilityVerifier EqualityVerifier
/**
 * Writes repeating 512-value runs with a constant delta of 101 and checks
 * (via the FileDump output) that RLEv2 encodes the DATA stream into 50 bytes.
 * Fixed: stdout is now restored in a finally block so a failing dump or
 * assertion no longer leaves System.out redirected for later tests.
 */
@Test public void testFixedDeltaLarge() throws Exception {
  ObjectInspector inspector=ObjectInspectorFactory.getReflectionObjectInspector(Integer.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  Writer w=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).compress(CompressionKind.NONE).inspector(inspector).rowIndexStride(0).encodingStrategy(OrcFile.EncodingStrategy.COMPRESSION).version(OrcFile.Version.V_0_12));
  for (int i=0; i < 5120; ++i) {
    w.addRow(i % 512 + ((i % 512) * 100));
  }
  w.close();
  PrintStream origOut=System.out;
  ByteArrayOutputStream myOut=new ByteArrayOutputStream();
  System.setOut(new PrintStream(myOut));
  try {
    FileDump.main(new String[]{testFilePath.toUri().toString()});
    System.out.flush();
    String outDump=new String(myOut.toByteArray());
    assertEquals(true,outDump.contains("Stream: column 0 section DATA start: 3 length 50"));
  } finally {
    // Always restore stdout, even on failure.
    System.setOut(origOut);
  }
}
APIUtilityVerifier EqualityVerifier
/**
 * Writes 0 followed by an ascending run so the first delta's sign is unknown
 * at encode time; expects a 642-byte DATA stream in the FileDump output.
 * Fixed: stdout is now restored in a finally block so a failing dump or
 * assertion no longer leaves System.out redirected for later tests.
 */
@Test public void testDeltaUnknownSign() throws Exception {
  ObjectInspector inspector=ObjectInspectorFactory.getReflectionObjectInspector(Integer.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  Writer w=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).compress(CompressionKind.NONE).inspector(inspector).rowIndexStride(0).encodingStrategy(OrcFile.EncodingStrategy.COMPRESSION).version(OrcFile.Version.V_0_12));
  // Leading 0 makes the first two values equal, hiding the run's direction.
  w.addRow(0);
  for (int i=0; i < 511; ++i) {
    w.addRow(i);
  }
  w.close();
  PrintStream origOut=System.out;
  ByteArrayOutputStream myOut=new ByteArrayOutputStream();
  System.setOut(new PrintStream(myOut));
  try {
    FileDump.main(new String[]{testFilePath.toUri().toString()});
    System.out.flush();
    String outDump=new String(myOut.toByteArray());
    assertEquals(true,outDump.contains("Stream: column 0 section DATA start: 3 length 642"));
  } finally {
    // Always restore stdout, even on failure.
    System.setOut(origOut);
  }
}
APIUtilityVerifier EqualityVerifier
/**
 * Writes repeating ascending runs 0..511 (fixed delta of one) and expects a
 * 40-byte DATA stream in the FileDump output.
 * Fixed: stdout is now restored in a finally block so a failing dump or
 * assertion no longer leaves System.out redirected for later tests.
 */
@Test public void testFixedDeltaOne() throws Exception {
  ObjectInspector inspector=ObjectInspectorFactory.getReflectionObjectInspector(Integer.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  Writer w=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).compress(CompressionKind.NONE).inspector(inspector).rowIndexStride(0).encodingStrategy(OrcFile.EncodingStrategy.COMPRESSION).version(OrcFile.Version.V_0_12));
  for (int i=0; i < 5120; ++i) {
    w.addRow(i % 512);
  }
  w.close();
  PrintStream origOut=System.out;
  ByteArrayOutputStream myOut=new ByteArrayOutputStream();
  System.setOut(new PrintStream(myOut));
  try {
    FileDump.main(new String[]{testFilePath.toUri().toString()});
    System.out.flush();
    String outDump=new String(myOut.toByteArray());
    assertEquals(true,outDump.contains("Stream: column 0 section DATA start: 3 length 40"));
  } finally {
    // Always restore stdout, even on failure.
    System.setOut(origOut);
  }
}
APIUtilityVerifier EqualityVerifier
/**
 * Writes repeating descending runs 512..1 (fixed delta of minus one) and
 * expects a 50-byte DATA stream in the FileDump output.
 * Fixed: stdout is now restored in a finally block so a failing dump or
 * assertion no longer leaves System.out redirected for later tests.
 */
@Test public void testFixedDeltaOneDescending() throws Exception {
  ObjectInspector inspector=ObjectInspectorFactory.getReflectionObjectInspector(Integer.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  Writer w=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).compress(CompressionKind.NONE).inspector(inspector).rowIndexStride(0).encodingStrategy(OrcFile.EncodingStrategy.COMPRESSION).version(OrcFile.Version.V_0_12));
  for (int i=0; i < 5120; ++i) {
    w.addRow(512 - (i % 512));
  }
  w.close();
  PrintStream origOut=System.out;
  ByteArrayOutputStream myOut=new ByteArrayOutputStream();
  System.setOut(new PrintStream(myOut));
  try {
    FileDump.main(new String[]{testFilePath.toUri().toString()});
    System.out.flush();
    String outDump=new String(myOut.toByteArray());
    assertEquals(true,outDump.contains("Stream: column 0 section DATA start: 3 length 50"));
  } finally {
    // Always restore stdout, even on failure.
    System.setOut(origOut);
  }
}
APIUtilityVerifier EqualityVerifier
/**
 * Writes repeating runs with a large net-positive fixed delta (ascending by
 * 99 per step) and expects a 60-byte DATA stream in the FileDump output.
 * Fixed: stdout is now restored in a finally block so a failing dump or
 * assertion no longer leaves System.out redirected for later tests.
 */
@Test public void testFixedDeltaLargeDescending() throws Exception {
  ObjectInspector inspector=ObjectInspectorFactory.getReflectionObjectInspector(Integer.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  Writer w=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).compress(CompressionKind.NONE).inspector(inspector).rowIndexStride(0).encodingStrategy(OrcFile.EncodingStrategy.COMPRESSION).version(OrcFile.Version.V_0_12));
  for (int i=0; i < 5120; ++i) {
    w.addRow((512 - i % 512) + ((i % 512) * 100));
  }
  w.close();
  PrintStream origOut=System.out;
  ByteArrayOutputStream myOut=new ByteArrayOutputStream();
  System.setOut(new PrintStream(myOut));
  try {
    FileDump.main(new String[]{testFilePath.toUri().toString()});
    System.out.flush();
    String outDump=new String(myOut.toByteArray());
    assertEquals(true,outDump.contains("Stream: column 0 section DATA start: 3 length 60"));
  } finally {
    // Always restore stdout, even on failure.
    System.setOut(origOut);
  }
}
APIUtilityVerifier EqualityVerifier
/**
 * Writes five identical values; RLEv2's short-repeat encoding should produce
 * a 2-byte DATA stream per the FileDump output.
 * Fixed: stdout is now restored in a finally block so a failing dump or
 * assertion no longer leaves System.out redirected for later tests.
 */
@Test public void testShortRepeat() throws Exception {
  ObjectInspector inspector=ObjectInspectorFactory.getReflectionObjectInspector(Integer.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  Writer w=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).compress(CompressionKind.NONE).inspector(inspector).rowIndexStride(0).encodingStrategy(OrcFile.EncodingStrategy.COMPRESSION).version(OrcFile.Version.V_0_12));
  for (int i=0; i < 5; ++i) {
    w.addRow(10);
  }
  w.close();
  PrintStream origOut=System.out;
  ByteArrayOutputStream myOut=new ByteArrayOutputStream();
  System.setOut(new PrintStream(myOut));
  try {
    FileDump.main(new String[]{testFilePath.toUri().toString()});
    System.out.flush();
    String outDump=new String(myOut.toByteArray());
    assertEquals(true,outDump.contains("Stream: column 0 section DATA start: 3 length 2"));
  } finally {
    // Always restore stdout, even on failure.
    System.setOut(origOut);
  }
}
APIUtilityVerifier EqualityVerifier
/**
 * Writes 5120 copies of the same value (delta zero) and expects a 50-byte
 * DATA stream in the FileDump output.
 * Fixed: stdout is now restored in a finally block so a failing dump or
 * assertion no longer leaves System.out redirected for later tests.
 */
@Test public void testFixedDeltaZero() throws Exception {
  ObjectInspector inspector=ObjectInspectorFactory.getReflectionObjectInspector(Integer.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  Writer w=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).compress(CompressionKind.NONE).inspector(inspector).rowIndexStride(0).encodingStrategy(OrcFile.EncodingStrategy.COMPRESSION).version(OrcFile.Version.V_0_12));
  for (int i=0; i < 5120; ++i) {
    w.addRow(123);
  }
  w.close();
  PrintStream origOut=System.out;
  ByteArrayOutputStream myOut=new ByteArrayOutputStream();
  System.setOut(new PrintStream(myOut));
  try {
    FileDump.main(new String[]{testFilePath.toUri().toString()});
    System.out.flush();
    String outDump=new String(myOut.toByteArray());
    assertEquals(true,outDump.contains("Stream: column 0 section DATA start: 3 length 50"));
  } finally {
    // Always restore stdout, even on failure.
    System.setOut(origOut);
  }
}
APIUtilityVerifier EqualityVerifier
/**
 * Writes one large outlier (10,000,000) followed by small random values so
 * RLEv2 must use patched-base encoding; expects a 583-byte DATA stream.
 * The fixed Random seed (123) keeps the expected length deterministic.
 * Fixed: stdout is now restored in a finally block so a failing dump or
 * assertion no longer leaves System.out redirected for later tests.
 */
@Test public void testPatchedBase() throws Exception {
  ObjectInspector inspector=ObjectInspectorFactory.getReflectionObjectInspector(Integer.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  Writer w=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).compress(CompressionKind.NONE).inspector(inspector).rowIndexStride(0).encodingStrategy(OrcFile.EncodingStrategy.COMPRESSION).version(OrcFile.Version.V_0_12));
  Random rand=new Random(123);
  w.addRow(10000000);
  for (int i=0; i < 511; ++i) {
    w.addRow(rand.nextInt(i + 1));
  }
  w.close();
  PrintStream origOut=System.out;
  ByteArrayOutputStream myOut=new ByteArrayOutputStream();
  System.setOut(new PrintStream(myOut));
  try {
    FileDump.main(new String[]{testFilePath.toUri().toString()});
    System.out.flush();
    String outDump=new String(myOut.toByteArray());
    assertEquals(true,outDump.contains("Stream: column 0 section DATA start: 3 length 583"));
  } finally {
    // Always restore stdout, even on failure.
    System.setOut(origOut);
  }
}
Class: org.apache.hadoop.hive.ql.io.orc.TestRecordReaderImpl EqualityVerifier
/** LESS_THAN "c" against string stats that report nulls present. */
@Test public void testLessThanWithNullInStats() throws Exception {
  PredicateLeaf lessThan = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.LESS_THAN, PredicateLeaf.Type.STRING, "x", "c", null);
  // whole range at or above the literal: no non-null row can match
  assertEquals(TruthValue.NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("d", "e", true), lessThan, null));
  // whole range below the literal: every non-null row matches
  assertEquals(TruthValue.YES_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("a", "b", true), lessThan, null));
  // range straddles the literal: result is uncertain
  assertEquals(TruthValue.YES_NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("b", "c", true), lessThan, null));
  assertEquals(TruthValue.NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("c", "d", true), lessThan, null));
  assertEquals(TruthValue.YES_NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("b", "d", true), lessThan, null));
  // degenerate range equal to the literal: strict less-than never holds
  assertEquals(TruthValue.NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("c", "c", true), lessThan, null));
}
EqualityVerifier
/** compareToRange over strings, including a degenerate single-point range. */
@Test public void testCompareToCharNeedConvert() throws Exception {
  final String lo = "hello";
  final String hi = "world";
  assertEquals(Location.BEFORE, RecordReaderImpl.compareToRange("apple", lo, hi));
  assertEquals(Location.AFTER, RecordReaderImpl.compareToRange("zombie", lo, hi));
  assertEquals(Location.MIN, RecordReaderImpl.compareToRange("hello", lo, hi));
  assertEquals(Location.MIDDLE, RecordReaderImpl.compareToRange("pilot", lo, hi));
  assertEquals(Location.MAX, RecordReaderImpl.compareToRange("world", lo, hi));
  // single-point range where min == max
  assertEquals(Location.BEFORE, RecordReaderImpl.compareToRange("apple", lo, lo));
  assertEquals(Location.MIN, RecordReaderImpl.compareToRange("hello", lo, lo));
  assertEquals(Location.AFTER, RecordReaderImpl.compareToRange("zombie", lo, lo));
}
InternalCallVerifier EqualityVerifier
/** NULL_SAFE_EQUALS 15 via bloom filter: no hit until 15 is added. */
@Test public void testIntNullSafeEqualsBloomFilter() throws Exception {
  PredicateLeaf pred = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.LONG, "x", 15L, null);
  BloomFilterIO bloom = new BloomFilterIO(10000);
  for (int value = 20; value < 1000; value++) {
    bloom.addLong(value);
  }
  ColumnStatistics stats = ColumnStatisticsImpl.deserialize(createIntStats(10, 100));
  // 15 was never inserted, so the filter rules the row group out
  assertEquals(TruthValue.NO, RecordReaderImpl.evaluatePredicate(stats, pred, bloom));
  bloom.addLong(15);
  // now 15 may be present and the stats range [10, 100] allows it
  assertEquals(TruthValue.YES_NO, RecordReaderImpl.evaluatePredicate(stats, pred, bloom));
}
EqualityVerifier
/**
 * Evaluates NULL_SAFE_EQUALS predicates of every literal type against
 * timestamp column statistics, checking which type conversions can match.
 * Stats are created over millisecond ranges (e.g. [10, 100] vs [10000, 100000]).
 */
@Test public void testPredEvalWithTimestampStats() throws Exception {
// LONG literal 15 against millis range [10, 100]: possible match.
PredicateLeaf pred=TestSearchArgumentImpl.createPredicateLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS,PredicateLeaf.Type.LONG,"x",15L,null);
assertEquals(TruthValue.YES_NO,RecordReaderImpl.evaluatePredicateProto(createTimestampStats(10,100),pred,null));
// FLOAT literal: 15.0 misses [10, 100] after conversion but can hit [10000, 100000].
pred=TestSearchArgumentImpl.createPredicateLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS,PredicateLeaf.Type.FLOAT,"x",15.0,null);
assertEquals(TruthValue.NO,RecordReaderImpl.evaluatePredicateProto(createTimestampStats(10,100),pred,null));
assertEquals(TruthValue.YES_NO,RecordReaderImpl.evaluatePredicateProto(createTimestampStats(10000,100000),pred,null));
// STRING literal: raw "15" cannot match, but a Timestamp-formatted string can.
pred=TestSearchArgumentImpl.createPredicateLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS,PredicateLeaf.Type.STRING,"x","15",null);
assertEquals(TruthValue.NO,RecordReaderImpl.evaluatePredicateProto(createTimestampStats(10,100),pred,null));
pred=TestSearchArgumentImpl.createPredicateLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS,PredicateLeaf.Type.STRING,"x",new Timestamp(15).toString(),null);
assertEquals(TruthValue.YES_NO,RecordReaderImpl.evaluatePredicateProto(createTimestampStats(10,100),pred,null));
// DATE literal: only matches when the stats range covers whole days in millis.
pred=TestSearchArgumentImpl.createPredicateLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS,PredicateLeaf.Type.DATE,"x",new DateWritable(15).get(),null);
assertEquals(TruthValue.NO,RecordReaderImpl.evaluatePredicateProto(createTimestampStats(10,100),pred,null));
assertEquals(TruthValue.YES_NO,RecordReaderImpl.evaluatePredicateProto(createTimestampStats(10 * 24L * 60L* 60L* 1000L,100 * 24L * 60L* 60L* 1000L),pred,null));
// DECIMAL literal "15": misses [10, 100] but can hit [10000, 100000].
pred=TestSearchArgumentImpl.createPredicateLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS,PredicateLeaf.Type.DECIMAL,"x",new HiveDecimalWritable("15"),null);
assertEquals(TruthValue.NO,RecordReaderImpl.evaluatePredicateProto(createTimestampStats(10,100),pred,null));
assertEquals(TruthValue.YES_NO,RecordReaderImpl.evaluatePredicateProto(createTimestampStats(10000,100000),pred,null));
// TIMESTAMP literal at 15ms: inside [10, 100] but outside [10000, 100000].
pred=TestSearchArgumentImpl.createPredicateLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS,PredicateLeaf.Type.TIMESTAMP,"x",new Timestamp(15),null);
assertEquals(TruthValue.YES_NO,RecordReaderImpl.evaluatePredicateProto(createTimestampStats(10,100),pred,null));
assertEquals(TruthValue.NO,RecordReaderImpl.evaluatePredicateProto(createTimestampStats(10000,100000),pred,null));
}
InternalCallVerifier EqualityVerifier
/** EQUALS on a DATE literal via bloom filter: no hit until day 15 is added. */
@Test public void testDateWritableEqualsBloomFilter() throws Exception {
  PredicateLeaf pred = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.EQUALS, PredicateLeaf.Type.DATE, "x", new DateWritable(15).get(), null);
  BloomFilterIO bloom = new BloomFilterIO(10000);
  for (int day = 20; day < 1000; day++) {
    bloom.addLong((new DateWritable(day)).getDays());
  }
  ColumnStatistics stats = ColumnStatisticsImpl.deserialize(createDateStats(10, 100));
  // day 15 was never inserted, so the filter rules the row group out
  assertEquals(TruthValue.NO_NULL, RecordReaderImpl.evaluatePredicate(stats, pred, bloom));
  bloom.addLong((new DateWritable(15)).getDays());
  // now day 15 may be present and the stats range [10, 100] allows it
  assertEquals(TruthValue.YES_NO_NULL, RecordReaderImpl.evaluatePredicate(stats, pred, bloom));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * IN over decimals where the literal list itself contains a null entry:
 * once the stats report nulls, the null literal alone keeps the predicate
 * possible regardless of bloom-filter membership.
 * Fixed the raw List/ArrayList declarations to use generics.
 */
@Test public void testNullsInBloomFilter() throws Exception {
  List<Object> args = new ArrayList<>();
  args.add(new HiveDecimalWritable("15"));
  args.add(null);
  args.add(new HiveDecimalWritable("19"));
  PredicateLeaf pred = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.IN, PredicateLeaf.Type.DECIMAL, "x", null, args);
  BloomFilterIO bf = new BloomFilterIO(10000);
  for (int i = 20; i < 1000; i++) {
    bf.addString(HiveDecimal.create(i).toString());
  }
  // no nulls in stats, no bloom hit: predicate cannot match
  ColumnStatistics cs = ColumnStatisticsImpl.deserialize(createDecimalStats("10", "200", false));
  assertEquals(TruthValue.NO, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
  // stats report nulls, so the null literal makes a match possible
  cs = ColumnStatisticsImpl.deserialize(createDecimalStats("10", "200", true));
  assertEquals(TruthValue.YES_NO_NULL, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
  bf.addString(HiveDecimal.create(19).toString());
  assertEquals(TruthValue.YES_NO_NULL, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
  bf.addString(HiveDecimal.create(15).toString());
  assertEquals(TruthValue.YES_NO_NULL, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
}
EqualityVerifier
/** EQUALS "c" against string stats that report nulls present. */
@Test public void testEqualsWithNullInStats() throws Exception {
  PredicateLeaf eq = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.EQUALS, PredicateLeaf.Type.STRING, "x", "c", null);
  // literal entirely outside [min, max]
  assertEquals(TruthValue.NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("d", "e", true), eq, null));
  assertEquals(TruthValue.NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("a", "b", true), eq, null));
  // literal inside the range: match is possible but not certain
  assertEquals(TruthValue.YES_NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("b", "c", true), eq, null));
  assertEquals(TruthValue.YES_NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("c", "d", true), eq, null));
  assertEquals(TruthValue.YES_NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("b", "d", true), eq, null));
  // min == max == literal: every non-null value matches
  assertEquals(TruthValue.YES_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("c", "c", true), eq, null));
}
EqualityVerifier
/**
 * Checks the row-index position offsets that getIndexPosition reports for
 * each (encoding, column type, stream kind) combination; the two trailing
 * booleans select whether compression and a PRESENT stream are in play.
 */
@Test public void testGetIndexPosition() throws Exception {
// INT column, DIRECT encoding.
assertEquals(0,RecordReaderUtils.getIndexPosition(OrcProto.ColumnEncoding.Kind.DIRECT,OrcProto.Type.Kind.INT,OrcProto.Stream.Kind.PRESENT,true,true));
assertEquals(4,RecordReaderUtils.getIndexPosition(OrcProto.ColumnEncoding.Kind.DIRECT,OrcProto.Type.Kind.INT,OrcProto.Stream.Kind.DATA,true,true));
assertEquals(3,RecordReaderUtils.getIndexPosition(OrcProto.ColumnEncoding.Kind.DIRECT,OrcProto.Type.Kind.INT,OrcProto.Stream.Kind.DATA,false,true));
assertEquals(0,RecordReaderUtils.getIndexPosition(OrcProto.ColumnEncoding.Kind.DIRECT,OrcProto.Type.Kind.INT,OrcProto.Stream.Kind.DATA,true,false));
// STRING column, DICTIONARY encoding.
assertEquals(4,RecordReaderUtils.getIndexPosition(OrcProto.ColumnEncoding.Kind.DICTIONARY,OrcProto.Type.Kind.STRING,OrcProto.Stream.Kind.DATA,true,true));
// BINARY column: DATA plus a LENGTH stream.
assertEquals(4,RecordReaderUtils.getIndexPosition(OrcProto.ColumnEncoding.Kind.DIRECT,OrcProto.Type.Kind.BINARY,OrcProto.Stream.Kind.DATA,true,true));
assertEquals(3,RecordReaderUtils.getIndexPosition(OrcProto.ColumnEncoding.Kind.DIRECT,OrcProto.Type.Kind.BINARY,OrcProto.Stream.Kind.DATA,false,true));
assertEquals(6,RecordReaderUtils.getIndexPosition(OrcProto.ColumnEncoding.Kind.DIRECT,OrcProto.Type.Kind.BINARY,OrcProto.Stream.Kind.LENGTH,true,true));
assertEquals(4,RecordReaderUtils.getIndexPosition(OrcProto.ColumnEncoding.Kind.DIRECT,OrcProto.Type.Kind.BINARY,OrcProto.Stream.Kind.LENGTH,false,true));
// DECIMAL column: DATA plus a SECONDARY (scale) stream.
assertEquals(4,RecordReaderUtils.getIndexPosition(OrcProto.ColumnEncoding.Kind.DIRECT,OrcProto.Type.Kind.DECIMAL,OrcProto.Stream.Kind.DATA,true,true));
assertEquals(3,RecordReaderUtils.getIndexPosition(OrcProto.ColumnEncoding.Kind.DIRECT,OrcProto.Type.Kind.DECIMAL,OrcProto.Stream.Kind.DATA,false,true));
assertEquals(6,RecordReaderUtils.getIndexPosition(OrcProto.ColumnEncoding.Kind.DIRECT,OrcProto.Type.Kind.DECIMAL,OrcProto.Stream.Kind.SECONDARY,true,true));
assertEquals(4,RecordReaderUtils.getIndexPosition(OrcProto.ColumnEncoding.Kind.DIRECT,OrcProto.Type.Kind.DECIMAL,OrcProto.Stream.Kind.SECONDARY,false,true));
// TIMESTAMP column: DATA plus a SECONDARY (nanos) stream.
assertEquals(4,RecordReaderUtils.getIndexPosition(OrcProto.ColumnEncoding.Kind.DIRECT,OrcProto.Type.Kind.TIMESTAMP,OrcProto.Stream.Kind.DATA,true,true));
assertEquals(3,RecordReaderUtils.getIndexPosition(OrcProto.ColumnEncoding.Kind.DIRECT,OrcProto.Type.Kind.TIMESTAMP,OrcProto.Stream.Kind.DATA,false,true));
assertEquals(7,RecordReaderUtils.getIndexPosition(OrcProto.ColumnEncoding.Kind.DIRECT,OrcProto.Type.Kind.TIMESTAMP,OrcProto.Stream.Kind.SECONDARY,true,true));
assertEquals(5,RecordReaderUtils.getIndexPosition(OrcProto.ColumnEncoding.Kind.DIRECT,OrcProto.Type.Kind.TIMESTAMP,OrcProto.Stream.Kind.SECONDARY,false,true));
}
InternalCallVerifier EqualityVerifier
/** EQUALS on a TIMESTAMP literal via bloom filter: no hit until 15ms is added. */
@Test public void testTimestampEqualsBloomFilter() throws Exception {
  PredicateLeaf pred = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.EQUALS, PredicateLeaf.Type.TIMESTAMP, "x", new Timestamp(15), null);
  BloomFilterIO bloom = new BloomFilterIO(10000);
  for (int millis = 20; millis < 1000; millis++) {
    bloom.addLong((new Timestamp(millis)).getTime());
  }
  ColumnStatistics stats = ColumnStatisticsImpl.deserialize(createTimestampStats(10, 100));
  // the 15ms timestamp was never inserted, so the filter rules it out
  assertEquals(TruthValue.NO_NULL, RecordReaderImpl.evaluatePredicate(stats, pred, bloom));
  bloom.addLong((new Timestamp(15)).getTime());
  // now it may be present and the stats range [10, 100] allows it
  assertEquals(TruthValue.YES_NO_NULL, RecordReaderImpl.evaluatePredicate(stats, pred, bloom));
}
InternalCallVerifier EqualityVerifier
/** NULL_SAFE_EQUALS on a DECIMAL literal via bloom filter: no hit until 15 is added. */
@Test public void testDecimalNullSafeEqualsBloomFilter() throws Exception {
  PredicateLeaf pred = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.DECIMAL, "x",
      new HiveDecimalWritable("15"), null);
  BloomFilterIO bloom = new BloomFilterIO(10000);
  for (int value = 20; value < 1000; value++) {
    bloom.addString(HiveDecimal.create(value).toString());
  }
  ColumnStatistics stats = ColumnStatisticsImpl.deserialize(createDecimalStats("10", "200"));
  // decimal 15 was never inserted, so the filter rules the row group out
  assertEquals(TruthValue.NO, RecordReaderImpl.evaluatePredicate(stats, pred, bloom));
  bloom.addString(HiveDecimal.create(15).toString());
  // now 15 may be present and the stats range ["10", "200"] allows it
  assertEquals(TruthValue.YES_NO, RecordReaderImpl.evaluatePredicate(stats, pred, bloom));
}
APIUtilityVerifier EqualityVerifier
/**
 * BETWEEN "c" AND "f" against string stats that report nulls present,
 * covering ranges inside, outside, and straddling the interval.
 * Fixed the raw List/ArrayList declarations to use generics.
 */
@Test public void testBetweenWithNullInStats() throws Exception {
  List<Object> args = new ArrayList<>();
  args.add("c");
  args.add("f");
  PredicateLeaf pred = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.BETWEEN, PredicateLeaf.Type.STRING, "x", null, args);
  // stats range entirely inside ["c", "f"]: every non-null value matches
  assertEquals(TruthValue.YES_NULL,RecordReaderImpl.evaluatePredicateProto(createStringStats("d","e",true),pred,null));
  assertEquals(TruthValue.YES_NULL,RecordReaderImpl.evaluatePredicateProto(createStringStats("e","f",true),pred,null));
  // stats range entirely above the interval
  assertEquals(TruthValue.NO_NULL,RecordReaderImpl.evaluatePredicateProto(createStringStats("h","g",true),pred,null));
  // stats range straddles the upper bound
  assertEquals(TruthValue.YES_NO_NULL,RecordReaderImpl.evaluatePredicateProto(createStringStats("f","g",true),pred,null));
  assertEquals(TruthValue.YES_NO_NULL,RecordReaderImpl.evaluatePredicateProto(createStringStats("e","g",true),pred,null));
  // stats range matches the interval boundaries
  assertEquals(TruthValue.YES_NULL,RecordReaderImpl.evaluatePredicateProto(createStringStats("c","e",true),pred,null));
  assertEquals(TruthValue.YES_NULL,RecordReaderImpl.evaluatePredicateProto(createStringStats("c","f",true),pred,null));
  assertEquals(TruthValue.YES_NO_NULL,RecordReaderImpl.evaluatePredicateProto(createStringStats("c","g",true),pred,null));
  // stats range entirely below the interval
  assertEquals(TruthValue.NO_NULL,RecordReaderImpl.evaluatePredicateProto(createStringStats("a","b",true),pred,null));
  // stats range straddles the lower bound
  assertEquals(TruthValue.YES_NO_NULL,RecordReaderImpl.evaluatePredicateProto(createStringStats("a","c",true),pred,null));
  assertEquals(TruthValue.YES_NO_NULL,RecordReaderImpl.evaluatePredicateProto(createStringStats("b","d",true),pred,null));
  // degenerate range at the lower bound
  assertEquals(TruthValue.YES_NULL,RecordReaderImpl.evaluatePredicateProto(createStringStats("c","c",true),pred,null));
}
EqualityVerifier
/**
 * Evaluates NULL_SAFE_EQUALS predicates of every literal type against
 * string (and date) column statistics over the range ["10", "1000"].
 */
@Test public void testPredEvalWithStringStats() throws Exception {
// LONG, FLOAT, and STRING literals all convert and may match.
PredicateLeaf pred=TestSearchArgumentImpl.createPredicateLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS,PredicateLeaf.Type.LONG,"x",100L,null);
assertEquals(TruthValue.YES_NO,RecordReaderImpl.evaluatePredicateProto(createStringStats("10","1000"),pred,null));
pred=TestSearchArgumentImpl.createPredicateLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS,PredicateLeaf.Type.FLOAT,"x",100.0,null);
assertEquals(TruthValue.YES_NO,RecordReaderImpl.evaluatePredicateProto(createStringStats("10","1000"),pred,null));
pred=TestSearchArgumentImpl.createPredicateLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS,PredicateLeaf.Type.STRING,"x","100",null);
assertEquals(TruthValue.YES_NO,RecordReaderImpl.evaluatePredicateProto(createStringStats("10","1000"),pred,null));
// DATE literal is checked against date stats rather than string stats.
pred=TestSearchArgumentImpl.createPredicateLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS,PredicateLeaf.Type.DATE,"x",new DateWritable(100).get(),null);
assertEquals(TruthValue.YES_NO,RecordReaderImpl.evaluatePredicateProto(createDateStats(10,1000),pred,null));
// DECIMAL and TIMESTAMP literals also convert and may match.
pred=TestSearchArgumentImpl.createPredicateLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS,PredicateLeaf.Type.DECIMAL,"x",new HiveDecimalWritable("100"),null);
assertEquals(TruthValue.YES_NO,RecordReaderImpl.evaluatePredicateProto(createStringStats("10","1000"),pred,null));
pred=TestSearchArgumentImpl.createPredicateLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS,PredicateLeaf.Type.TIMESTAMP,"x",new Timestamp(100),null);
assertEquals(TruthValue.YES_NO,RecordReaderImpl.evaluatePredicateProto(createStringStats("10","1000"),pred,null));
}
EqualityVerifier
/** NULL_SAFE_EQUALS "c" ignores the null flag: truth values carry no _NULL. */
@Test public void testNullSafeEqualsWithNullInStats() throws Exception {
  PredicateLeaf nsEq = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.STRING, "x", "c", null);
  // literal entirely outside [min, max]
  assertEquals(TruthValue.NO,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("d", "e", true), nsEq, null));
  assertEquals(TruthValue.NO,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("a", "b", true), nsEq, null));
  // literal inside the range: match is possible but not certain
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("b", "c", true), nsEq, null));
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("c", "d", true), nsEq, null));
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("b", "d", true), nsEq, null));
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("c", "c", true), nsEq, null));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * IN ("str_15", "str_19") via bloom filter: a NO until either value is
 * added to the filter, then a possible match.
 * Fixed the raw List/ArrayList declarations to use generics.
 */
@Test public void testStringInBloomFilter() throws Exception {
  List<Object> args = new ArrayList<>();
  args.add("str_15");
  args.add("str_19");
  PredicateLeaf pred = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.IN, PredicateLeaf.Type.STRING, "x", null, args);
  BloomFilterIO bf = new BloomFilterIO(10000);
  for (int i = 20; i < 1000; i++) {
    bf.addString("str_" + i);
  }
  ColumnStatistics cs = ColumnStatisticsImpl.deserialize(createStringStats("str_10", "str_200"));
  // neither IN value was inserted, so the filter rules the row group out
  assertEquals(TruthValue.NO_NULL, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
  bf.addString("str_19");
  assertEquals(TruthValue.YES_NO_NULL, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
  bf.addString("str_15");
  assertEquals(TruthValue.YES_NO_NULL, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
}
InternalCallVerifier EqualityVerifier
/** NULL_SAFE_EQUALS on a DATE literal via bloom filter: no hit until day 15 is added. */
@Test public void testDateWritableNullSafeEqualsBloomFilter() throws Exception {
  PredicateLeaf pred = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.DATE, "x",
      new DateWritable(15).get(), null);
  BloomFilterIO bloom = new BloomFilterIO(10000);
  for (int day = 20; day < 1000; day++) {
    bloom.addLong((new DateWritable(day)).getDays());
  }
  ColumnStatistics stats = ColumnStatisticsImpl.deserialize(createDateStats(10, 100));
  // day 15 was never inserted, so the filter rules the row group out
  assertEquals(TruthValue.NO, RecordReaderImpl.evaluatePredicate(stats, pred, bloom));
  bloom.addLong((new DateWritable(15)).getDays());
  // now day 15 may be present and the stats range [10, 100] allows it
  assertEquals(TruthValue.YES_NO, RecordReaderImpl.evaluatePredicate(stats, pred, bloom));
}
APIUtilityVerifier EqualityVerifier
/**
 * IN (10, 20) over integer statistics ranges.
 * Fixed the raw List/ArrayList declarations to use generics.
 */
@Test public void testIn() throws Exception {
  List<Object> args = new ArrayList<>();
  args.add(10L);
  args.add(20L);
  PredicateLeaf pred = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.IN, PredicateLeaf.Type.LONG, "x", null, args);
  // range collapsed onto one of the IN values: always true (modulo nulls)
  assertEquals(TruthValue.YES_NULL,RecordReaderImpl.evaluatePredicateProto(createIntStats(20L,20L),pred,null));
  // range above both values: never true
  assertEquals(TruthValue.NO_NULL,RecordReaderImpl.evaluatePredicateProto(createIntStats(30L,30L),pred,null));
  // range covers both values: possible
  assertEquals(TruthValue.YES_NO_NULL,RecordReaderImpl.evaluatePredicateProto(createIntStats(10L,30L),pred,null));
  // range strictly between the two values: contains neither
  assertEquals(TruthValue.NO_NULL,RecordReaderImpl.evaluatePredicateProto(createIntStats(12L,18L),pred,null));
}
EqualityVerifier
/** compareToRange over longs, including a degenerate single-point range. */
@Test public void testCompareToRangeInt() throws Exception {
  final long min = 20L;
  final long max = 40L;
  assertEquals(Location.BEFORE, RecordReaderImpl.compareToRange(19L, min, max));
  assertEquals(Location.AFTER, RecordReaderImpl.compareToRange(41L, min, max));
  assertEquals(Location.MIN, RecordReaderImpl.compareToRange(20L, min, max));
  assertEquals(Location.MIDDLE, RecordReaderImpl.compareToRange(21L, min, max));
  assertEquals(Location.MAX, RecordReaderImpl.compareToRange(40L, min, max));
  // single-point range [1, 1]
  assertEquals(Location.BEFORE, RecordReaderImpl.compareToRange(0L, 1L, 1L));
  assertEquals(Location.MIN, RecordReaderImpl.compareToRange(1L, 1L, 1L));
  assertEquals(Location.AFTER, RecordReaderImpl.compareToRange(2L, 1L, 1L));
}
EqualityVerifier
/** x < 15 against integer stats ranges above, straddling, and below 15. */
@Test public void testLessThan() throws Exception {
  PredicateLeaf lessThan = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.LESS_THAN, PredicateLeaf.Type.LONG, "x", 15L, null);
  // whole range at or above the literal: never true
  assertEquals(TruthValue.NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(20L, 30L), lessThan, null));
  assertEquals(TruthValue.NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(15L, 30L), lessThan, null));
  // range straddles the literal: uncertain
  assertEquals(TruthValue.YES_NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(10L, 30L), lessThan, null));
  assertEquals(TruthValue.YES_NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(10L, 15L), lessThan, null));
  // whole range below the literal: always true (modulo nulls)
  assertEquals(TruthValue.YES_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(0L, 10L), lessThan, null));
}
EqualityVerifier
/**
 * Evaluates NULL_SAFE_EQUALS predicates of every literal type against
 * double column statistics over the range [10.0, 100.0].
 */
@Test public void testPredEvalWithDoubleStats() throws Exception {
// LONG and FLOAT literals convert directly and may match.
PredicateLeaf pred=TestSearchArgumentImpl.createPredicateLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS,PredicateLeaf.Type.LONG,"x",15L,null);
assertEquals(TruthValue.YES_NO,RecordReaderImpl.evaluatePredicateProto(createDoubleStats(10.0,100.0),pred,null));
pred=TestSearchArgumentImpl.createPredicateLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS,PredicateLeaf.Type.FLOAT,"x",15.0,null);
assertEquals(TruthValue.YES_NO,RecordReaderImpl.evaluatePredicateProto(createDoubleStats(10.0,100.0),pred,null));
// STRING literal "15" does not convert to a matching double.
pred=TestSearchArgumentImpl.createPredicateLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS,PredicateLeaf.Type.STRING,"x","15",null);
assertEquals(TruthValue.NO,RecordReaderImpl.evaluatePredicateProto(createDoubleStats(10.0,100.0),pred,null));
// DATE and DECIMAL literals convert and may match.
pred=TestSearchArgumentImpl.createPredicateLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS,PredicateLeaf.Type.DATE,"x",new DateWritable(15).get(),null);
assertEquals(TruthValue.YES_NO,RecordReaderImpl.evaluatePredicateProto(createDoubleStats(10.0,100.0),pred,null));
pred=TestSearchArgumentImpl.createPredicateLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS,PredicateLeaf.Type.DECIMAL,"x",new HiveDecimalWritable("15"),null);
assertEquals(TruthValue.YES_NO,RecordReaderImpl.evaluatePredicateProto(createDoubleStats(10.0,100.0),pred,null));
// TIMESTAMP literals: 15 seconds falls inside [10.0, 100.0], 150 seconds does not.
pred=TestSearchArgumentImpl.createPredicateLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS,PredicateLeaf.Type.TIMESTAMP,"x",new Timestamp(15 * 1000L),null);
assertEquals(TruthValue.YES_NO,RecordReaderImpl.evaluatePredicateProto(createDoubleStats(10.0,100.0),pred,null));
pred=TestSearchArgumentImpl.createPredicateLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS,PredicateLeaf.Type.TIMESTAMP,"x",new Timestamp(150 * 1000L),null);
assertEquals(TruthValue.NO,RecordReaderImpl.evaluatePredicateProto(createDoubleStats(10.0,100.0),pred,null));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * IN predicate over a LONG column backed by a bloom filter: the predicate
 * should be ruled out until one of the searched values (15 or 19) is
 * actually added to the filter.
 *
 * Fixed: the argument list used raw {@code List}/{@code ArrayList} types;
 * it is now parameterized.
 */
@Test public void testIntInBloomFilter() throws Exception {
  List<Object> args = new ArrayList<>();
  args.add(15L);
  args.add(19L);
  PredicateLeaf pred = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.IN, PredicateLeaf.Type.LONG, "x", null, args);
  BloomFilterIO bf = new BloomFilterIO(10000);
  for (int i = 20; i < 1000; i++) {
    bf.addLong(i);
  }
  ColumnStatistics cs = ColumnStatisticsImpl.deserialize(createIntStats(10, 100));
  // Neither searched value is in the filter yet.
  assertEquals(TruthValue.NO_NULL, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
  bf.addLong(19);
  assertEquals(TruthValue.YES_NO_NULL, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
  bf.addLong(15);
  assertEquals(TruthValue.YES_NO_NULL, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
}
EqualityVerifier
/**
 * IS_NULL evaluation: stats that record nulls may match, stats without
 * nulls cannot.
 */
@Test public void testIsNullWithNullInStats() throws Exception {
  PredicateLeaf leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.IS_NULL, PredicateLeaf.Type.STRING, "x", null, null);
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("c", "d", true), leaf, null));
  assertEquals(TruthValue.NO,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("c", "d", false), leaf, null));
}
InternalCallVerifier EqualityVerifier
/**
 * EQUALS on a string column with a bloom filter: "str_15" is ruled out
 * until it is added to the filter.
 */
@Test public void testStringEqualsBloomFilter() throws Exception {
  PredicateLeaf leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.EQUALS, PredicateLeaf.Type.STRING, "x", "str_15", null);
  BloomFilterIO bloom = new BloomFilterIO(10000);
  for (int val = 20; val < 1000; val++) {
    bloom.addString("str_" + val);
  }
  ColumnStatistics stats =
      ColumnStatisticsImpl.deserialize(createStringStats("str_10", "str_200"));
  assertEquals(TruthValue.NO_NULL, RecordReaderImpl.evaluatePredicate(stats, leaf, bloom));
  bloom.addString("str_15");
  assertEquals(TruthValue.YES_NO_NULL, RecordReaderImpl.evaluatePredicate(stats, leaf, bloom));
}
InternalCallVerifier EqualityVerifier
/**
 * NULL_SAFE_EQUALS on a timestamp column with a bloom filter keyed on the
 * epoch millis of each timestamp.
 */
@Test public void testTimestampNullSafeEqualsBloomFilter() throws Exception {
  PredicateLeaf leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.TIMESTAMP, "x",
      new Timestamp(15), null);
  BloomFilterIO bloom = new BloomFilterIO(10000);
  for (int millis = 20; millis < 1000; millis++) {
    bloom.addLong(new Timestamp(millis).getTime());
  }
  ColumnStatistics stats = ColumnStatisticsImpl.deserialize(createTimestampStats(10, 100));
  assertEquals(TruthValue.NO, RecordReaderImpl.evaluatePredicate(stats, leaf, bloom));
  bloom.addLong(new Timestamp(15).getTime());
  assertEquals(TruthValue.YES_NO, RecordReaderImpl.evaluatePredicate(stats, leaf, bloom));
}
InternalCallVerifier EqualityVerifier
/**
 * EQUALS on a LONG column with a bloom filter: 15 is ruled out until it
 * is added to the filter.
 */
@Test public void testIntEqualsBloomFilter() throws Exception {
  PredicateLeaf leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.EQUALS, PredicateLeaf.Type.LONG, "x", 15L, null);
  BloomFilterIO bloom = new BloomFilterIO(10000);
  for (int val = 20; val < 1000; val++) {
    bloom.addLong(val);
  }
  ColumnStatistics stats = ColumnStatisticsImpl.deserialize(createIntStats(10, 100));
  assertEquals(TruthValue.NO_NULL, RecordReaderImpl.evaluatePredicate(stats, leaf, bloom));
  bloom.addLong(15);
  assertEquals(TruthValue.YES_NO_NULL, RecordReaderImpl.evaluatePredicate(stats, leaf, bloom));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * IN predicate over a DATE column backed by a bloom filter keyed on the
 * day count of each date; days 15 and 19 are ruled out until added.
 *
 * Fixed: the argument list used raw {@code List}/{@code ArrayList} types;
 * it is now parameterized.
 */
@Test public void testDateWritableInBloomFilter() throws Exception {
  List<Object> args = new ArrayList<>();
  args.add(new DateWritable(15).get());
  args.add(new DateWritable(19).get());
  PredicateLeaf pred = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.IN, PredicateLeaf.Type.DATE, "x", null, args);
  BloomFilterIO bf = new BloomFilterIO(10000);
  for (int i = 20; i < 1000; i++) {
    bf.addLong((new DateWritable(i)).getDays());
  }
  ColumnStatistics cs = ColumnStatisticsImpl.deserialize(createDateStats(10, 100));
  // Neither searched day is in the filter yet.
  assertEquals(TruthValue.NO_NULL, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
  bf.addLong((new DateWritable(19)).getDays());
  assertEquals(TruthValue.YES_NO_NULL, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
  bf.addLong((new DateWritable(15)).getDays());
  assertEquals(TruthValue.YES_NO_NULL, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
}
APIUtilityVerifier EqualityVerifier
/**
 * IN over string stats that include nulls: truth values track whether
 * "c" or "f" can fall within each min/max range.
 *
 * Fixed: the argument list used raw {@code List}/{@code ArrayList} types;
 * it is now parameterized.
 */
@Test public void testInWithNullInStats() throws Exception {
  List<Object> args = new ArrayList<>();
  args.add("c");
  args.add("f");
  PredicateLeaf pred = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.IN, PredicateLeaf.Type.STRING, "x", null, args);
  // Ranges that cannot contain "c" or "f".
  assertEquals(TruthValue.NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("d", "e", true), pred, null));
  assertEquals(TruthValue.NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("a", "b", true), pred, null));
  // Ranges that may contain a searched value.
  assertEquals(TruthValue.YES_NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("e", "f", true), pred, null));
  assertEquals(TruthValue.YES_NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("c", "d", true), pred, null));
  assertEquals(TruthValue.YES_NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("b", "d", true), pred, null));
  // Degenerate range ["c","c"]: every non-null value matches.
  assertEquals(TruthValue.YES_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("c", "c", true), pred, null));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * IN predicate over a DECIMAL column backed by a bloom filter keyed on
 * decimal string form; 15 and 19 are ruled out until added.
 *
 * Fixed: the argument list used raw {@code List}/{@code ArrayList} types;
 * it is now parameterized.
 */
@Test public void testDecimalInBloomFilter() throws Exception {
  List<Object> args = new ArrayList<>();
  args.add(new HiveDecimalWritable("15"));
  args.add(new HiveDecimalWritable("19"));
  PredicateLeaf pred = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.IN, PredicateLeaf.Type.DECIMAL, "x", null, args);
  BloomFilterIO bf = new BloomFilterIO(10000);
  for (int i = 20; i < 1000; i++) {
    bf.addString(HiveDecimal.create(i).toString());
  }
  ColumnStatistics cs = ColumnStatisticsImpl.deserialize(createDecimalStats("10", "200"));
  // Neither searched decimal is in the filter yet.
  assertEquals(TruthValue.NO_NULL, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
  bf.addString(HiveDecimal.create(19).toString());
  assertEquals(TruthValue.YES_NO_NULL, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
  bf.addString(HiveDecimal.create(15).toString());
  assertEquals(TruthValue.YES_NO_NULL, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
}
EqualityVerifier
/**
 * LESS_THAN_EQUALS "c" against string stats that contain nulls: truth
 * values follow how the range relates to the literal.
 */
@Test public void testLessThanEqualsWithNullInStats() throws Exception {
  PredicateLeaf leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.LESS_THAN_EQUALS, PredicateLeaf.Type.STRING, "x", "c", null);
  // Entire range above "c".
  assertEquals(TruthValue.NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("d", "e", true), leaf, null));
  // Entire range at or below "c".
  assertEquals(TruthValue.YES_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("a", "b", true), leaf, null));
  assertEquals(TruthValue.YES_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("b", "c", true), leaf, null));
  // Range straddles "c".
  assertEquals(TruthValue.YES_NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("c", "d", true), leaf, null));
  assertEquals(TruthValue.YES_NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("b", "d", true), leaf, null));
  assertEquals(TruthValue.YES_NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createStringStats("c", "c", true), leaf, null));
}
EqualityVerifier
/**
 * EQUALS 15 against integer stat ranges: disjoint ranges reject, ranges
 * containing 15 may match, and the degenerate [15,15] range always matches.
 */
@Test public void testEquals() throws Exception {
  PredicateLeaf leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.EQUALS, PredicateLeaf.Type.LONG, "x", 15L, null);
  assertEquals(TruthValue.NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(20L, 30L), leaf, null));
  assertEquals(TruthValue.YES_NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(15L, 30L), leaf, null));
  assertEquals(TruthValue.YES_NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(10L, 30L), leaf, null));
  assertEquals(TruthValue.YES_NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(10L, 15L), leaf, null));
  assertEquals(TruthValue.NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(0L, 10L), leaf, null));
  assertEquals(TruthValue.YES_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(15L, 15L), leaf, null));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * IN predicate over a FLOAT column backed by a bloom filter of doubles;
 * 15.0 and 19.0 are ruled out until added.
 *
 * Fixed: the argument list used raw {@code List}/{@code ArrayList} types;
 * it is now parameterized.
 */
@Test public void testDoubleInBloomFilter() throws Exception {
  List<Object> args = new ArrayList<>();
  args.add(15.0);
  args.add(19.0);
  PredicateLeaf pred = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.IN, PredicateLeaf.Type.FLOAT, "x", null, args);
  BloomFilterIO bf = new BloomFilterIO(10000);
  for (int i = 20; i < 1000; i++) {
    bf.addDouble(i);
  }
  ColumnStatistics cs = ColumnStatisticsImpl.deserialize(createDoubleStats(10.0, 100.0));
  // Neither searched value is in the filter yet.
  assertEquals(TruthValue.NO_NULL, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
  bf.addDouble(19.0);
  assertEquals(TruthValue.YES_NO_NULL, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
  bf.addDouble(15.0);
  assertEquals(TruthValue.YES_NO_NULL, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
}
EqualityVerifier
/**
 * NULL_SAFE_EQUALS against boolean column statistics for both boolean
 * literal values.
 *
 * Fixed: the original repeated the {@code true}-literal leaf creation and
 * its two assertions verbatim; the exact duplicate added no coverage and
 * has been removed.
 */
@Test public void testPredEvalWithBooleanStats() throws Exception {
  PredicateLeaf pred = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.BOOLEAN, "x", true, null);
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createBooleanStats(10, 10), pred, null));
  assertEquals(TruthValue.NO,
      RecordReaderImpl.evaluatePredicateProto(createBooleanStats(10, 0), pred, null));
  pred = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.BOOLEAN, "x", false, null);
  assertEquals(TruthValue.NO,
      RecordReaderImpl.evaluatePredicateProto(createBooleanStats(10, 10), pred, null));
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createBooleanStats(10, 0), pred, null));
}
InternalCallVerifier EqualityVerifier
/**
 * NULL_SAFE_EQUALS on a string column with a bloom filter: "str_15" is
 * ruled out until it is added to the filter.
 */
@Test public void testStringNullSafeEqualsBloomFilter() throws Exception {
  PredicateLeaf leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.STRING, "x", "str_15", null);
  BloomFilterIO bloom = new BloomFilterIO(10000);
  for (int val = 20; val < 1000; val++) {
    bloom.addString("str_" + val);
  }
  ColumnStatistics stats =
      ColumnStatisticsImpl.deserialize(createStringStats("str_10", "str_200"));
  assertEquals(TruthValue.NO, RecordReaderImpl.evaluatePredicate(stats, leaf, bloom));
  bloom.addString("str_15");
  assertEquals(TruthValue.YES_NO, RecordReaderImpl.evaluatePredicate(stats, leaf, bloom));
}
EqualityVerifier
/**
 * IS_NULL against plain integer stats evaluates to YES_NO.
 */
@Test public void testIsNull() throws Exception {
  PredicateLeaf leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.IS_NULL, PredicateLeaf.Type.LONG, "x", null, null);
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(20L, 30L), leaf, null));
}
EqualityVerifier
/**
 * Evaluates NULL_SAFE_EQUALS leaves of several literal types against the
 * same decimal column statistics range ["10.0", "100.0"].
 */
@Test public void testPredEvalWithDecimalStats() throws Exception {
  // LONG literal 15 inside the range.
  PredicateLeaf leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.LONG, "x", 15L, null);
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createDecimalStats("10.0", "100.0"), leaf, null));
  // FLOAT literal 15.0.
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.FLOAT, "x", 15.0, null);
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createDecimalStats("10.0", "100.0"), leaf, null));
  // STRING literal against decimal stats is rejected outright.
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.STRING, "x", "15", null);
  assertEquals(TruthValue.NO,
      RecordReaderImpl.evaluatePredicateProto(createDecimalStats("10.0", "100.0"), leaf, null));
  // DATE literal (day 15).
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.DATE, "x",
      new DateWritable(15).get(), null);
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createDecimalStats("10.0", "100.0"), leaf, null));
  // DECIMAL literal 15.
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.DECIMAL, "x",
      new HiveDecimalWritable("15"), null);
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createDecimalStats("10.0", "100.0"), leaf, null));
  // TIMESTAMP at 15 seconds: inside the range.
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.TIMESTAMP, "x",
      new Timestamp(15 * 1000L), null);
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createDecimalStats("10.0", "100.0"), leaf, null));
  // TIMESTAMP at 150 seconds: outside the range.
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.TIMESTAMP, "x",
      new Timestamp(150 * 1000L), null);
  assertEquals(TruthValue.NO,
      RecordReaderImpl.evaluatePredicateProto(createDecimalStats("10.0", "100.0"), leaf, null));
}
EqualityVerifier
/**
 * Checks compareToRange classification of a string value against a
 * [min, max] range, including the degenerate min == max case.
 */
@Test public void testCompareToRangeString() throws Exception {
  // Proper range ["b", "c"].
  assertEquals(Location.BEFORE, RecordReaderImpl.compareToRange("a", "b", "c"));
  assertEquals(Location.AFTER, RecordReaderImpl.compareToRange("d", "b", "c"));
  assertEquals(Location.MIN, RecordReaderImpl.compareToRange("b", "b", "c"));
  assertEquals(Location.MIDDLE, RecordReaderImpl.compareToRange("bb", "b", "c"));
  assertEquals(Location.MAX, RecordReaderImpl.compareToRange("c", "b", "c"));
  // Degenerate range ["b", "b"].
  assertEquals(Location.BEFORE, RecordReaderImpl.compareToRange("a", "b", "b"));
  assertEquals(Location.MIN, RecordReaderImpl.compareToRange("b", "b", "b"));
  assertEquals(Location.AFTER, RecordReaderImpl.compareToRange("c", "b", "b"));
}
APIUtilityVerifier EqualityVerifier
/**
 * BETWEEN 10 AND 20 against integer stat ranges: disjoint ranges reject,
 * overlapping ranges may match, and ranges wholly inside [10, 20] always
 * match.
 *
 * Fixed: the argument list used raw {@code List}/{@code ArrayList} types;
 * it is now parameterized.
 */
@Test public void testBetween() throws Exception {
  List<Object> args = new ArrayList<>();
  args.add(10L);
  args.add(20L);
  PredicateLeaf pred = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.BETWEEN, PredicateLeaf.Type.LONG, "x", null, args);
  // Disjoint on either side.
  assertEquals(TruthValue.NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(0L, 5L), pred, null));
  assertEquals(TruthValue.NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(30L, 40L), pred, null));
  // Partial overlap.
  assertEquals(TruthValue.YES_NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(5L, 15L), pred, null));
  assertEquals(TruthValue.YES_NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(15L, 25L), pred, null));
  assertEquals(TruthValue.YES_NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(5L, 25L), pred, null));
  // Fully contained in [10, 20].
  assertEquals(TruthValue.YES_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(10L, 20L), pred, null));
  assertEquals(TruthValue.YES_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(12L, 18L), pred, null));
}
InternalCallVerifier EqualityVerifier
/**
 * EQUALS on a FLOAT column with a bloom filter of doubles: 15.0 is ruled
 * out until it is added to the filter.
 */
@Test public void testDoubleEqualsBloomFilter() throws Exception {
  PredicateLeaf leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.EQUALS, PredicateLeaf.Type.FLOAT, "x", 15.0, null);
  BloomFilterIO bloom = new BloomFilterIO(10000);
  for (int val = 20; val < 1000; val++) {
    bloom.addDouble(val);
  }
  ColumnStatistics stats = ColumnStatisticsImpl.deserialize(createDoubleStats(10.0, 100.0));
  assertEquals(TruthValue.NO_NULL, RecordReaderImpl.evaluatePredicate(stats, leaf, bloom));
  bloom.addDouble(15.0);
  assertEquals(TruthValue.YES_NO_NULL, RecordReaderImpl.evaluatePredicate(stats, leaf, bloom));
}
EqualityVerifier
/**
 * LESS_THAN_EQUALS 15 against integer stat ranges.
 */
@Test public void testLessThanEquals() throws Exception {
  PredicateLeaf leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.LESS_THAN_EQUALS, PredicateLeaf.Type.LONG, "x", 15L, null);
  // Whole range above 15.
  assertEquals(TruthValue.NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(20L, 30L), leaf, null));
  // Range straddles 15.
  assertEquals(TruthValue.YES_NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(15L, 30L), leaf, null));
  assertEquals(TruthValue.YES_NO_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(10L, 30L), leaf, null));
  // Whole range at or below 15.
  assertEquals(TruthValue.YES_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(10L, 15L), leaf, null));
  assertEquals(TruthValue.YES_NULL,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(0L, 10L), leaf, null));
}
InternalCallVerifier EqualityVerifier
/**
 * NULL_SAFE_EQUALS on a FLOAT column with a bloom filter of doubles:
 * 15.0 is ruled out until it is added to the filter.
 */
@Test public void testDoubleNullSafeEqualsBloomFilter() throws Exception {
  PredicateLeaf leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.FLOAT, "x", 15.0, null);
  BloomFilterIO bloom = new BloomFilterIO(10000);
  for (int val = 20; val < 1000; val++) {
    bloom.addDouble(val);
  }
  ColumnStatistics stats = ColumnStatisticsImpl.deserialize(createDoubleStats(10.0, 100.0));
  assertEquals(TruthValue.NO, RecordReaderImpl.evaluatePredicate(stats, leaf, bloom));
  bloom.addDouble(15.0);
  assertEquals(TruthValue.YES_NO, RecordReaderImpl.evaluatePredicate(stats, leaf, bloom));
}
EqualityVerifier
/**
 * NULL_SAFE_EQUALS 15 against integer stat ranges (no NULL component in
 * the resulting truth values).
 */
@Test public void testNullSafeEquals() throws Exception {
  PredicateLeaf leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.LONG, "x", 15L, null);
  assertEquals(TruthValue.NO,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(20L, 30L), leaf, null));
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(15L, 30L), leaf, null));
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(10L, 30L), leaf, null));
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(10L, 15L), leaf, null));
  assertEquals(TruthValue.NO,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(0L, 10L), leaf, null));
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(15L, 15L), leaf, null));
}
EqualityVerifier
/**
 * Evaluates NULL_SAFE_EQUALS leaves of several literal types against the
 * same integer column statistics range [10, 100].
 */
@Test public void testPredEvalWithIntStats() throws Exception {
  // LONG literal 15 inside the range.
  PredicateLeaf leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.LONG, "x", 15L, null);
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(10, 100), leaf, null));
  // FLOAT literal 15.0.
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.FLOAT, "x", 15.0, null);
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(10, 100), leaf, null));
  // STRING literal against int stats is rejected outright.
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.STRING, "x", "15", null);
  assertEquals(TruthValue.NO,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(10, 100), leaf, null));
  // DATE literal (day 15).
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.DATE, "x",
      new DateWritable(15).get(), null);
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(10, 100), leaf, null));
  // DECIMAL literal 15.
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.DECIMAL, "x",
      new HiveDecimalWritable("15"), null);
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(10, 100), leaf, null));
  // TIMESTAMP at 15 millis.
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.TIMESTAMP, "x",
      new Timestamp(15), null);
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createIntStats(10, 100), leaf, null));
}
EqualityVerifier
/**
 * getMin extraction across statistic kinds: long, double, string (both
 * empty and populated), and decimal.
 */
@Test public void testGetMin() throws Exception {
  assertEquals(10L,
      RecordReaderImpl.getMin(ColumnStatisticsImpl.deserialize(createIntStats(10L, 100L))));
  OrcProto.ColumnStatistics doubleProto = OrcProto.ColumnStatistics.newBuilder()
      .setDoubleStatistics(
          OrcProto.DoubleStatistics.newBuilder().setMinimum(10.0d).setMaximum(100.0d).build())
      .build();
  assertEquals(10.0d, RecordReaderImpl.getMin(ColumnStatisticsImpl.deserialize(doubleProto)));
  // String stats with no recorded min/max yield null.
  OrcProto.ColumnStatistics emptyStringProto = OrcProto.ColumnStatistics.newBuilder()
      .setStringStatistics(OrcProto.StringStatistics.newBuilder().build())
      .build();
  assertEquals(null, RecordReaderImpl.getMin(ColumnStatisticsImpl.deserialize(emptyStringProto)));
  OrcProto.ColumnStatistics stringProto = OrcProto.ColumnStatistics.newBuilder()
      .setStringStatistics(
          OrcProto.StringStatistics.newBuilder().setMinimum("a").setMaximum("b").build())
      .build();
  assertEquals("a", RecordReaderImpl.getMin(ColumnStatisticsImpl.deserialize(stringProto)));
  assertEquals("hello",
      RecordReaderImpl.getMin(ColumnStatisticsImpl.deserialize(createStringStats("hello", "world"))));
  assertEquals(HiveDecimal.create("111.1"),
      RecordReaderImpl.getMin(ColumnStatisticsImpl.deserialize(createDecimalStats("111.1", "112.1"))));
}
InternalCallVerifier EqualityVerifier
/**
 * EQUALS on a DECIMAL column with a bloom filter keyed on decimal string
 * form: 15 is ruled out until it is added to the filter.
 */
@Test public void testDecimalEqualsBloomFilter() throws Exception {
  PredicateLeaf leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.EQUALS, PredicateLeaf.Type.DECIMAL, "x",
      new HiveDecimalWritable("15"), null);
  BloomFilterIO bloom = new BloomFilterIO(10000);
  for (int val = 20; val < 1000; val++) {
    bloom.addString(HiveDecimal.create(val).toString());
  }
  ColumnStatistics stats = ColumnStatisticsImpl.deserialize(createDecimalStats("10", "200"));
  assertEquals(TruthValue.NO_NULL, RecordReaderImpl.evaluatePredicate(stats, leaf, bloom));
  bloom.addString(HiveDecimal.create(15).toString());
  assertEquals(TruthValue.YES_NO_NULL, RecordReaderImpl.evaluatePredicate(stats, leaf, bloom));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * IN predicate over a TIMESTAMP column backed by a bloom filter keyed on
 * epoch millis; timestamps 15 and 19 are ruled out until added.
 *
 * Fixed: the argument list used raw {@code List}/{@code ArrayList} types;
 * it is now parameterized.
 */
@Test public void testTimestampInBloomFilter() throws Exception {
  List<Object> args = new ArrayList<>();
  args.add(new Timestamp(15));
  args.add(new Timestamp(19));
  PredicateLeaf pred = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.IN, PredicateLeaf.Type.TIMESTAMP, "x", null, args);
  BloomFilterIO bf = new BloomFilterIO(10000);
  for (int i = 20; i < 1000; i++) {
    bf.addLong((new Timestamp(i)).getTime());
  }
  ColumnStatistics cs = ColumnStatisticsImpl.deserialize(createTimestampStats(10, 100));
  // Neither searched timestamp is in the filter yet.
  assertEquals(TruthValue.NO_NULL, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
  bf.addLong((new Timestamp(19)).getTime());
  assertEquals(TruthValue.YES_NO_NULL, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
  bf.addLong((new Timestamp(15)).getTime());
  assertEquals(TruthValue.YES_NO_NULL, RecordReaderImpl.evaluatePredicate(cs, pred, bf));
}
EqualityVerifier
/**
 * getMax extraction across statistic kinds: long, double, string (both
 * empty and populated), and decimal.
 */
@Test public void testGetMax() throws Exception {
  assertEquals(100L,
      RecordReaderImpl.getMax(ColumnStatisticsImpl.deserialize(createIntStats(10L, 100L))));
  OrcProto.ColumnStatistics doubleProto = OrcProto.ColumnStatistics.newBuilder()
      .setDoubleStatistics(
          OrcProto.DoubleStatistics.newBuilder().setMinimum(10.0d).setMaximum(100.0d).build())
      .build();
  assertEquals(100.0d, RecordReaderImpl.getMax(ColumnStatisticsImpl.deserialize(doubleProto)));
  // String stats with no recorded min/max yield null.
  OrcProto.ColumnStatistics emptyStringProto = OrcProto.ColumnStatistics.newBuilder()
      .setStringStatistics(OrcProto.StringStatistics.newBuilder().build())
      .build();
  assertEquals(null, RecordReaderImpl.getMax(ColumnStatisticsImpl.deserialize(emptyStringProto)));
  OrcProto.ColumnStatistics stringProto = OrcProto.ColumnStatistics.newBuilder()
      .setStringStatistics(
          OrcProto.StringStatistics.newBuilder().setMinimum("a").setMaximum("b").build())
      .build();
  assertEquals("b", RecordReaderImpl.getMax(ColumnStatisticsImpl.deserialize(stringProto)));
  assertEquals("world",
      RecordReaderImpl.getMax(ColumnStatisticsImpl.deserialize(createStringStats("hello", "world"))));
  assertEquals(HiveDecimal.create("112.1"),
      RecordReaderImpl.getMax(ColumnStatisticsImpl.deserialize(createDecimalStats("111.1", "112.1"))));
}
EqualityVerifier
/**
 * Evaluates NULL_SAFE_EQUALS leaves of several literal types against the
 * same date column statistics range [day 10, day 100], including string
 * literals in and out of date form.
 */
@Test public void testPredEvalWithDateStats() throws Exception {
  // LONG literal 15.
  PredicateLeaf leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.LONG, "x", 15L, null);
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createDateStats(10, 100), leaf, null));
  // FLOAT literal 15.0.
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.FLOAT, "x", 15.0, null);
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createDateStats(10, 100), leaf, null));
  // Plain numeric string is not a date: rejected.
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.STRING, "x", "15", null);
  assertEquals(TruthValue.NO,
      RecordReaderImpl.evaluatePredicateProto(createDateStats(10, 100), leaf, null));
  // Date-formatted string inside the range: possible match.
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.STRING, "x", "1970-01-11", null);
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createDateStats(10, 100), leaf, null));
  // Non-date strings: rejected.
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.STRING, "x", "15.1", null);
  assertEquals(TruthValue.NO,
      RecordReaderImpl.evaluatePredicateProto(createDateStats(10, 100), leaf, null));
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.STRING, "x", "__a15__1", null);
  assertEquals(TruthValue.NO,
      RecordReaderImpl.evaluatePredicateProto(createDateStats(10, 100), leaf, null));
  // Date-formatted string outside the range.
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.STRING, "x", "2000-01-16", null);
  assertEquals(TruthValue.NO,
      RecordReaderImpl.evaluatePredicateProto(createDateStats(10, 100), leaf, null));
  // Date-formatted string inside the range.
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.STRING, "x", "1970-01-16", null);
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createDateStats(10, 100), leaf, null));
  // DATE literal inside, then outside, the range.
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.DATE, "x",
      new DateWritable(15).get(), null);
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createDateStats(10, 100), leaf, null));
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.DATE, "x",
      new DateWritable(150).get(), null);
  assertEquals(TruthValue.NO,
      RecordReaderImpl.evaluatePredicateProto(createDateStats(10, 100), leaf, null));
  // DECIMAL literal 15.
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.DECIMAL, "x",
      new HiveDecimalWritable("15"), null);
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createDateStats(10, 100), leaf, null));
  // TIMESTAMP at 15 millis: rejected; at exactly day 15: possible match.
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.TIMESTAMP, "x",
      new Timestamp(15), null);
  assertEquals(TruthValue.NO,
      RecordReaderImpl.evaluatePredicateProto(createDateStats(10, 100), leaf, null));
  leaf = TestSearchArgumentImpl.createPredicateLeaf(
      PredicateLeaf.Operator.NULL_SAFE_EQUALS, PredicateLeaf.Type.TIMESTAMP, "x",
      new Timestamp(15L * 24L * 60L * 60L * 1000L), null);
  assertEquals(TruthValue.YES_NO,
      RecordReaderImpl.evaluatePredicateProto(createDateStats(10, 100), leaf, null));
}
Class: org.apache.hadoop.hive.ql.io.orc.TestStreamName InternalCallVerifier EqualityVerifier
/**
 * Exercises StreamName equality and ordering: equal column/kind pairs are
 * equal, and comparison orders by the (column, kind) pair with null
 * sorting after any stream.
 */
@Test public void test1() throws Exception {
  StreamName data3 = new StreamName(3, OrcProto.Stream.Kind.DATA);
  StreamName dict3 = new StreamName(3, OrcProto.Stream.Kind.DICTIONARY_DATA);
  StreamName data5 = new StreamName(5, OrcProto.Stream.Kind.DATA);
  StreamName dict5 = new StreamName(5, OrcProto.Stream.Kind.DICTIONARY_DATA);
  StreamName data3Copy = new StreamName(3, OrcProto.Stream.Kind.DATA);
  // equals: reflexive, sensitive to kind and column, value-based, null-safe.
  assertEquals(true, data3.equals(data3));
  assertEquals(false, data3.equals(dict3));
  assertEquals(false, data3.equals(data5));
  assertEquals(true, data3.equals(data3Copy));
  assertEquals(true, data3.compareTo(null) < 0);
  assertEquals(false, data3.equals(null));
  // compareTo: ordered by column then kind; equal values compare as 0.
  assertEquals(true, data3.compareTo(dict3) < 0);
  assertEquals(true, dict3.compareTo(data5) < 0);
  assertEquals(true, data5.compareTo(dict5) < 0);
  assertEquals(true, dict5.compareTo(data3Copy) > 0);
  assertEquals(0, data3Copy.compareTo(data3));
}
Class: org.apache.hadoop.hive.ql.io.orc.TestStringDictionary APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes 20k random strings drawn from a 10k-value pool (about half
 * distinct), reads them back in order, and verifies every column in every
 * stripe was encoded as DICTIONARY_V2.
 *
 * Fixed: the RecordReader was never closed; it is now closed after the
 * stripe-footer checks.
 */
@Test public void testHalfDistinct() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Text.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector)
          .compress(CompressionKind.NONE).bufferSize(10000));
  // Fixed seed keeps the data (and thus the expected encoding) deterministic.
  Random rand = new Random(123);
  int[] input = new int[20000];
  for (int i = 0; i < 20000; i++) {
    input[i] = rand.nextInt(10000);
  }
  for (int i = 0; i < 20000; i++) {
    writer.addRow(new Text(String.valueOf(input[i])));
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(new Text(String.valueOf(input[idx++])), row);
  }
  // Every column of every stripe should have used the dictionary encoding.
  for (StripeInformation stripe : reader.getStripes()) {
    OrcProto.StripeFooter footer = ((RecordReaderImpl) rows).readStripeFooter(stripe);
    for (int i = 0; i < footer.getColumnsCount(); ++i) {
      OrcProto.ColumnEncoding encoding = footer.getColumns(i);
      assertEquals(OrcProto.ColumnEncoding.Kind.DICTIONARY_V2, encoding.getKind());
    }
  }
  rows.close();
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Same data as testHalfDistinct but with the row-index-stride dictionary
 * check disabled; DICTIONARY_V2 encoding is still expected.
 *
 * Fixed: the RecordReader was never closed; it is now closed after the
 * stripe-footer checks.
 */
@Test public void testHalfDistinctCheckDisabled() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Text.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  conf.setBoolean(ConfVars.HIVE_ORC_ROW_INDEX_STRIDE_DICTIONARY_CHECK.varname, false);
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector)
          .compress(CompressionKind.NONE).bufferSize(10000));
  // Fixed seed keeps the data (and thus the expected encoding) deterministic.
  Random rand = new Random(123);
  int[] input = new int[20000];
  for (int i = 0; i < 20000; i++) {
    input[i] = rand.nextInt(10000);
  }
  for (int i = 0; i < 20000; i++) {
    writer.addRow(new Text(String.valueOf(input[i])));
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(new Text(String.valueOf(input[idx++])), row);
  }
  // Every column of every stripe should have used the dictionary encoding.
  for (StripeInformation stripe : reader.getStripes()) {
    OrcProto.StripeFooter footer = ((RecordReaderImpl) rows).readStripeFooter(stripe);
    for (int i = 0; i < footer.getColumnsCount(); ++i) {
      OrcProto.ColumnEncoding encoding = footer.getColumns(i);
      assertEquals(OrcProto.ColumnEncoding.Kind.DICTIONARY_V2, encoding.getKind());
    }
  }
  rows.close();
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes 20k fully-distinct strings with the V_0_11 file version and
 * verifies every column in every stripe still uses the (old) DICTIONARY
 * encoding, since V11 always dictionary-encodes strings.
 *
 * Fixed: the RecordReader was never closed; it is now closed after the
 * stripe-footer checks.
 */
@Test public void testTooManyDistinctV11AlwaysDictionary() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Text.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).compress(CompressionKind.NONE)
          .version(OrcFile.Version.V_0_11).bufferSize(10000));
  for (int i = 0; i < 20000; i++) {
    writer.addRow(new Text(String.valueOf(i)));
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(new Text(String.valueOf(idx++)), row);
  }
  for (StripeInformation stripe : reader.getStripes()) {
    OrcProto.StripeFooter footer = ((RecordReaderImpl) rows).readStripeFooter(stripe);
    for (int i = 0; i < footer.getColumnsCount(); ++i) {
      OrcProto.ColumnEncoding encoding = footer.getColumns(i);
      assertEquals(OrcProto.ColumnEncoding.Kind.DICTIONARY, encoding.getKind());
    }
  }
  rows.close();
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes 20k fully-distinct strings and verifies the writer fell back to
 * DIRECT_V2 encoding for every column in every stripe.
 *
 * Fixed: the RecordReader was never closed; it is now closed after the
 * stripe-footer checks.
 */
@Test public void testTooManyDistinct() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Text.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector)
          .compress(CompressionKind.NONE).bufferSize(10000));
  for (int i = 0; i < 20000; i++) {
    writer.addRow(new Text(String.valueOf(i)));
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(new Text(String.valueOf(idx++)), row);
  }
  for (StripeInformation stripe : reader.getStripes()) {
    OrcProto.StripeFooter footer = ((RecordReaderImpl) rows).readStripeFooter(stripe);
    for (int i = 0; i < footer.getColumnsCount(); ++i) {
      OrcProto.ColumnEncoding encoding = footer.getColumns(i);
      assertEquals(OrcProto.ColumnEncoding.Kind.DIRECT_V2, encoding.getKind());
    }
  }
  rows.close();
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Same data as testTooManyDistinct but with the row-index-stride
 * dictionary check disabled; DIRECT_V2 encoding is still expected.
 *
 * Fixed: the RecordReader was never closed; it is now closed after the
 * stripe-footer checks.
 */
@Test public void testTooManyDistinctCheckDisabled() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Text.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  conf.setBoolean(ConfVars.HIVE_ORC_ROW_INDEX_STRIDE_DICTIONARY_CHECK.varname, false);
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector)
          .compress(CompressionKind.NONE).bufferSize(10000));
  for (int i = 0; i < 20000; i++) {
    writer.addRow(new Text(String.valueOf(i)));
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(new Text(String.valueOf(idx++)), row);
  }
  for (StripeInformation stripe : reader.getStripes()) {
    OrcProto.StripeFooter footer = ((RecordReaderImpl) rows).readStripeFooter(stripe);
    for (int i = 0; i < footer.getColumnsCount(); ++i) {
      OrcProto.ColumnEncoding encoding = footer.getColumns(i);
      assertEquals(OrcProto.ColumnEncoding.Kind.DIRECT_V2, encoding.getKind());
    }
  }
  rows.close();
}
Class: org.apache.hadoop.hive.ql.io.orc.TestTypeDescription InternalCallVerifier EqualityVerifier
/**
 * Checks the JSON and string renderings of {@link TypeDescription},
 * including the id/max-id numbering of nested struct, union, decimal,
 * and char types.
 */
@Test public void testJson() {
  TypeDescription bin = TypeDescription.createBinary();
  assertEquals("{\"category\": \"binary\", \"id\": 0, \"max\": 0}", bin.toJson());
  assertEquals("binary", bin.toString());
  TypeDescription struct = TypeDescription.createStruct()
      .addField("f1", TypeDescription.createInt())
      .addField("f2", TypeDescription.createString())
      .addField("f3", TypeDescription.createDecimal());
  // BUG FIX: the expected toString() literals below had their angle-bracketed
  // sections stripped (e.g. just "struct"); restored the full type signatures,
  // which agree with the untouched toJson() expectations (decimal(38,10) etc.).
  assertEquals("struct<f1:int,f2:string,f3:decimal(38,10)>", struct.toString());
  assertEquals("{\"category\": \"struct\", \"id\": 0, \"max\": 3, \"fields\": [\n" + "  \"f1\": {\"category\": \"int\", \"id\": 1, \"max\": 1},\n" + "  \"f2\": {\"category\": \"string\", \"id\": 2, \"max\": 2},\n"+ "  \"f3\": {\"category\": \"decimal\", \"id\": 3, \"max\": 3, \"precision\": 38, \"scale\": 10}]}",struct.toJson());
  struct = TypeDescription.createStruct()
      .addField("f1", TypeDescription.createUnion()
          .addUnionChild(TypeDescription.createByte())
          .addUnionChild(TypeDescription.createDecimal().withPrecision(20).withScale(10)))
      .addField("f2", TypeDescription.createStruct()
          .addField("f3", TypeDescription.createDate())
          .addField("f4", TypeDescription.createDouble())
          .addField("f5", TypeDescription.createBoolean()))
      .addField("f6", TypeDescription.createChar().withMaxLength(100));
  assertEquals("struct<f1:uniontype<tinyint,decimal(20,10)>,f2:struct<f3:date,f4:double,f5:boolean>,f6:char(100)>", struct.toString());
  assertEquals("{\"category\": \"struct\", \"id\": 0, \"max\": 8, \"fields\": [\n" + "  \"f1\": {\"category\": \"union\", \"id\": 1, \"max\": 3, \"children\": [\n" + "    {\"category\": \"tinyint\", \"id\": 2, \"max\": 2},\n"+ "    {\"category\": \"decimal\", \"id\": 3, \"max\": 3, \"precision\": 20, \"scale\": 10}]},\n"+ "  \"f2\": {\"category\": \"struct\", \"id\": 4, \"max\": 7, \"fields\": [\n"+ "    \"f3\": {\"category\": \"date\", \"id\": 5, \"max\": 5},\n"+ "    \"f4\": {\"category\": \"double\", \"id\": 6, \"max\": 6},\n"+ "    \"f5\": {\"category\": \"boolean\", \"id\": 7, \"max\": 7}]},\n"+ "  \"f6\": {\"category\": \"char\", \"id\": 8, \"max\": 8, \"length\": 100}]}",struct.toJson());
}
Class: org.apache.hadoop.hive.ql.io.orc.TestUnrolledBitPack APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Round-trips a fixed pattern of {@code val} interleaved with zeros to
 * exercise the unrolled bit-pack/unpack paths for the current value width
 * ({@code val} is a field of the enclosing test class).
 * @throws Exception
 */
@Test public void testBitPacking() throws Exception {
  ObjectInspector inspector;
  // Reflection-based inspector creation is not thread-safe; serialize on TestOrcFile.
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(Long.class,
        ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  long[] inp = new long[]{val,0,val,val,0,val,0,val,val,0,val,0,val,val,0,0,val,val,0,val,0,0,val,0,val,0,val,0,0,val,0,val,0,val,0,0,val,0,val,0,val,0,0,val,0,val,0,val,0,0,val,0,val,0,val,0,0,val,0,val,0,val,0,0,val,0,val,0,val,0,0,val,0,val,0,val,0,0,val,0,val,0,val,0,0,val,0,val,0,val,0,0,val,0,val,0,0,val,val};
  // BUG FIX: restored the element type stripped from this declaration; the
  // raw List made the for-each over Long fail to compile.
  List<Long> input = Lists.newArrayList(Longs.asList(inp));
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .compress(CompressionKind.NONE).bufferSize(10000));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  // Read back and verify every value survives the bit-packing round trip.
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}
Class: org.apache.hadoop.hive.ql.io.orc.TestVectorOrcFile APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes 32768 rows of the "big row" schema (ints and strings duplicated in
 * adjacent pairs), then exercises {@code seekToRow}: first a full backwards
 * scan of the file, then seeks within a byte-range reader restricted to two
 * projected columns.
 * @throws Exception
 */
@Test public void testSeek() throws Exception {
  TypeDescription schema = createBigRowSchema();
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).setSchema(schema).stripeSize(200000)
          .bufferSize(65536).rowIndexStride(1000));
  VectorizedRowBatch batch = schema.createRowBatch();
  Random rand = new Random(42);
  final int COUNT = 32768;
  long[] intValues = new long[COUNT];
  double[] doubleValues = new double[COUNT];
  String[] stringValues = new String[COUNT];
  BytesWritable[] byteValues = new BytesWritable[COUNT];
  String[] words = new String[128];
  for (int i = 0; i < words.length; ++i) {
    words[i] = Integer.toHexString(rand.nextInt());
  }
  // Duplicate ints and strings in adjacent pairs so every value occurs twice.
  for (int i = 0; i < COUNT / 2; ++i) {
    intValues[2 * i] = rand.nextLong();
    intValues[2 * i + 1] = intValues[2 * i];
    stringValues[2 * i] = words[rand.nextInt(words.length)];
    stringValues[2 * i + 1] = stringValues[2 * i];
  }
  for (int i = 0; i < COUNT; ++i) {
    doubleValues[i] = rand.nextDouble();
    byte[] buf = new byte[20];
    rand.nextBytes(buf);
    byteValues[i] = new BytesWritable(buf);
  }
  // Write the rows in full 1024-row batches, flushing any remainder at the end.
  for (int i = 0; i < COUNT; ++i) {
    appendRandomRow(batch, intValues, doubleValues, stringValues, byteValues, words, i);
    if (batch.size == 1024) {
      writer.addRowBatch(batch);
      batch.reset();
    }
  }
  if (batch.size != 0) {
    writer.addRowBatch(batch);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  assertEquals(COUNT, reader.getNumberOfRows());
  RecordReader rows = reader.rows();
  // Each row-group index entry of the first stripe should cover a full
  // 1000-row stride (the configured rowIndexStride).
  MetadataReader meta = ((RecordReaderImpl) rows).getMetadataReader();
  OrcIndex index = meta.readRowIndex(reader.getStripes().get(0), null, null, null, null, null);
  for (int c = 1; c < 9; ++c) {
    OrcProto.RowIndex colIndex = index.getRowGroupIndex()[c];
    assertEquals(1000, colIndex.getEntry(0).getStatistics().getNumberOfValues());
  }
  // Read the whole file backwards via seekToRow, checking every field.
  OrcStruct row = null;
  for (int i = COUNT - 1; i >= 0; --i) {
    rows.seekToRow(i);
    row = (OrcStruct) rows.next(row);
    BigRow expected = createRandomRow(intValues, doubleValues, stringValues, byteValues, words, i);
    assertEquals(expected.boolean1.booleanValue(), ((BooleanWritable) row.getFieldValue(0)).get());
    assertEquals(expected.byte1.byteValue(), ((ByteWritable) row.getFieldValue(1)).get());
    assertEquals(expected.short1.shortValue(), ((ShortWritable) row.getFieldValue(2)).get());
    assertEquals(expected.int1.intValue(), ((IntWritable) row.getFieldValue(3)).get());
    assertEquals(expected.long1.longValue(), ((LongWritable) row.getFieldValue(4)).get());
    assertEquals(expected.float1, ((FloatWritable) row.getFieldValue(5)).get(), 0.0001);
    assertEquals(expected.double1, ((DoubleWritable) row.getFieldValue(6)).get(), 0.0001);
    assertEquals(expected.bytes1, row.getFieldValue(7));
    assertEquals(expected.string1, row.getFieldValue(8));
    // NOTE(review): the element types of these lists were stripped by the same
    // sanitizer that damaged other declarations; raw List still compiles here,
    // so the original generics are left unreconstructed.
    List expectedList = expected.middle.list;
    List actualList = (List) ((OrcStruct) row.getFieldValue(9)).getFieldValue(0);
    compareList(expectedList, actualList, "middle list " + i);
    compareList(expected.list, (List) row.getFieldValue(10), "list " + i);
  }
  rows.close();
  // Locate the offsets of stripes 2 and 4 and the last row of stripe 2.
  // BUG FIX: restored <StripeInformation>, stripped from this declaration;
  // a raw Iterator's next() returns Object and the assignment did not compile.
  Iterator<StripeInformation> stripeIterator = reader.getStripes().iterator();
  long offsetOfStripe2 = 0;
  long offsetOfStripe4 = 0;
  long lastRowOfStripe2 = 0;
  for (int i = 0; i < 5; ++i) {
    StripeInformation stripe = stripeIterator.next();
    if (i < 2) {
      lastRowOfStripe2 += stripe.getNumberOfRows();
    } else if (i == 2) {
      offsetOfStripe2 = stripe.getOffset();
      lastRowOfStripe2 += stripe.getNumberOfRows() - 1;
    } else if (i == 4) {
      offsetOfStripe4 = stripe.getOffset();
    }
  }
  // Re-open with a byte range covering stripes 2-3 and only columns 5 and 9
  // projected, then seek to the last row of stripe 2 and read two rows.
  boolean[] columns = new boolean[reader.getStatistics().length];
  columns[5] = true;
  columns[9] = true;
  rows = reader.rowsOptions(new Reader.Options()
      .range(offsetOfStripe2, offsetOfStripe4 - offsetOfStripe2).include(columns));
  rows.seekToRow(lastRowOfStripe2);
  for (int i = 0; i < 2; ++i) {
    row = (OrcStruct) rows.next(row);
    BigRow expected = createRandomRow(intValues, doubleValues, stringValues,
        byteValues, words, (int) (lastRowOfStripe2 + i));
    assertEquals(expected.long1.longValue(), ((LongWritable) row.getFieldValue(4)).get());
    assertEquals(expected.string1, row.getFieldValue(8));
  }
  rows.close();
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes three stripes of known repeating values (1/"one", 2/"two",
 * 3/"three") and verifies the per-stripe column statistics and the
 * row-group index entries.
 * @throws Exception
 */
@Test public void testStripeLevelStats() throws Exception {
  TypeDescription schema = TypeDescription.createStruct()
      .addField("int1", TypeDescription.createInt())
      .addField("string1", TypeDescription.createString());
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000).bufferSize(10000));
  VectorizedRowBatch batch = schema.createRowBatch();
  batch.size = 1000;
  LongColumnVector field1 = (LongColumnVector) batch.cols[0];
  BytesColumnVector field2 = (BytesColumnVector) batch.cols[1];
  field1.isRepeating = true;
  field2.isRepeating = true;
  // 5 batches of (1,"one"), then 5 of (2,"two"), then 1 of (3,"three").
  for (int b = 0; b < 11; b++) {
    if (b >= 5) {
      if (b >= 10) {
        field1.vector[0] = 3;
        field2.setVal(0, "three".getBytes());
      } else {
        field1.vector[0] = 2;
        field2.setVal(0, "two".getBytes());
      }
    } else {
      field1.vector[0] = 1;
      field2.setVal(0, "one".getBytes());
    }
    writer.addRowBatch(batch);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  schema = writer.getSchema();
  assertEquals(2, schema.getMaximumId());
  boolean[] expected = new boolean[]{false, true, false};
  boolean[] included = OrcUtils.includeColumns("int1", schema);
  assertEquals(true, Arrays.equals(expected, included));
  // BUG FIX: restored <StripeStatistics>, stripped from this declaration; a
  // raw List's get(i) returns Object and the assignments below did not compile.
  List<StripeStatistics> stats = reader.getStripeStatistics();
  int numStripes = stats.size();
  assertEquals(3, numStripes);
  StripeStatistics ss1 = stats.get(0);
  StripeStatistics ss2 = stats.get(1);
  StripeStatistics ss3 = stats.get(2);
  assertEquals(5000, ss1.getColumnStatistics()[0].getNumberOfValues());
  assertEquals(5000, ss2.getColumnStatistics()[0].getNumberOfValues());
  assertEquals(1000, ss3.getColumnStatistics()[0].getNumberOfValues());
  assertEquals(5000, (ss1.getColumnStatistics()[1]).getNumberOfValues());
  assertEquals(5000, (ss2.getColumnStatistics()[1]).getNumberOfValues());
  assertEquals(1000, (ss3.getColumnStatistics()[1]).getNumberOfValues());
  assertEquals(1, ((IntegerColumnStatistics) ss1.getColumnStatistics()[1]).getMinimum());
  assertEquals(2, ((IntegerColumnStatistics) ss2.getColumnStatistics()[1]).getMinimum());
  assertEquals(3, ((IntegerColumnStatistics) ss3.getColumnStatistics()[1]).getMinimum());
  assertEquals(1, ((IntegerColumnStatistics) ss1.getColumnStatistics()[1]).getMaximum());
  assertEquals(2, ((IntegerColumnStatistics) ss2.getColumnStatistics()[1]).getMaximum());
  assertEquals(3, ((IntegerColumnStatistics) ss3.getColumnStatistics()[1]).getMaximum());
  assertEquals(5000, ((IntegerColumnStatistics) ss1.getColumnStatistics()[1]).getSum());
  assertEquals(10000, ((IntegerColumnStatistics) ss2.getColumnStatistics()[1]).getSum());
  assertEquals(3000, ((IntegerColumnStatistics) ss3.getColumnStatistics()[1]).getSum());
  assertEquals(5000, (ss1.getColumnStatistics()[2]).getNumberOfValues());
  assertEquals(5000, (ss2.getColumnStatistics()[2]).getNumberOfValues());
  assertEquals(1000, (ss3.getColumnStatistics()[2]).getNumberOfValues());
  assertEquals("one", ((StringColumnStatistics) ss1.getColumnStatistics()[2]).getMinimum());
  assertEquals("two", ((StringColumnStatistics) ss2.getColumnStatistics()[2]).getMinimum());
  assertEquals("three", ((StringColumnStatistics) ss3.getColumnStatistics()[2]).getMinimum());
  assertEquals("one", ((StringColumnStatistics) ss1.getColumnStatistics()[2]).getMaximum());
  assertEquals("two", ((StringColumnStatistics) ss2.getColumnStatistics()[2]).getMaximum());
  assertEquals("three", ((StringColumnStatistics) ss3.getColumnStatistics()[2]).getMaximum());
  assertEquals(15000, ((StringColumnStatistics) ss1.getColumnStatistics()[2]).getSum());
  assertEquals(15000, ((StringColumnStatistics) ss2.getColumnStatistics()[2]).getSum());
  assertEquals(5000, ((StringColumnStatistics) ss3.getColumnStatistics()[2]).getSum());
  // Check the row-group index of column 1 in the first two stripes.
  RecordReaderImpl recordReader = (RecordReaderImpl) reader.rows();
  OrcProto.RowIndex[] index = recordReader.readRowIndex(0, null, null).getRowGroupIndex();
  assertEquals(3, index.length);
  // BUG FIX: restored <OrcProto.RowIndexEntry>, stripped from this declaration;
  // a raw List's get(0) returns Object and the calls below did not compile.
  List<OrcProto.RowIndexEntry> items = index[1].getEntryList();
  assertEquals(1, items.size());
  assertEquals(3, items.get(0).getPositionsCount());
  assertEquals(0, items.get(0).getPositions(0));
  assertEquals(0, items.get(0).getPositions(1));
  assertEquals(0, items.get(0).getPositions(2));
  assertEquals(1, items.get(0).getStatistics().getIntStatistics().getMinimum());
  index = recordReader.readRowIndex(1, null, null).getRowGroupIndex();
  assertEquals(3, index.length);
  items = index[1].getEntryList();
  assertEquals(2, items.get(0).getStatistics().getIntStatistics().getMaximum());
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Verifies that a repeating batch written to a string column that has
 * already abandoned dictionary encoding round-trips correctly.
 * @throws Exception
 */
@Test public void testNonDictionaryRepeatingString() throws Exception {
  TypeDescription schema = TypeDescription.createStruct()
      .addField("str", TypeDescription.createString());
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).setSchema(schema).rowIndexStride(1000));
  VectorizedRowBatch batch = schema.createRowBatch();
  batch.size = 1024;
  BytesColumnVector strCol = (BytesColumnVector) batch.cols[0];
  // First batch: all-distinct values so the writer drops dictionary encoding.
  for (int row = 0; row < batch.size; ++row) {
    strCol.setVal(row, Integer.toString(row * 10001).getBytes());
  }
  writer.addRowBatch(batch);
  // Second batch: a single value repeated for the whole batch.
  strCol.isRepeating = true;
  strCol.setVal(0, "Halloween".getBytes());
  writer.addRowBatch(batch);
  writer.close();
  // Read both batches back and verify their contents.
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf));
  RecordReader rows = reader.rows();
  batch = rows.nextBatch(null);
  assertEquals(1024, batch.size);
  for (int row = 0; row < 1024; ++row) {
    assertEquals(Integer.toString(row * 10001),
        makeString((BytesColumnVector) batch.cols[0], row));
  }
  batch = rows.nextBatch(batch);
  assertEquals(1024, batch.size);
  for (int row = 0; row < 1024; ++row) {
    assertEquals("Halloween", makeString((BytesColumnVector) batch.cols[0], row));
  }
  assertEquals(false, rows.hasNext());
}
APIUtilityVerifier IterativeVerifier BranchVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes 21000 (int,string) rows while tracking expected min/max values,
 * checks file statistics and type metadata, then reads the file with two
 * complementary column projections in lock-step, verifying each reader
 * sees its own column.
 * @throws Exception
 */
@Test public void testColumnProjection() throws Exception {
  TypeDescription schema = createInnerSchema();
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).setSchema(schema).stripeSize(1000)
          .compress(CompressionKind.NONE).bufferSize(100).rowIndexStride(1000));
  VectorizedRowBatch batch = schema.createRowBatch();
  Random r1 = new Random(1);
  Random r2 = new Random(2);
  int x;
  int minInt = 0, maxInt = 0;
  String y;
  String minStr = null, maxStr = null;
  batch.size = 1000;
  boolean first = true;
  // Track min/max of both columns while writing so the file statistics
  // can be verified afterwards.
  for (int b = 0; b < 21; ++b) {
    for (int r = 0; r < 1000; ++r) {
      x = r1.nextInt();
      y = Long.toHexString(r2.nextLong());
      if (first || x < minInt) {
        minInt = x;
      }
      if (first || x > maxInt) {
        maxInt = x;
      }
      if (first || y.compareTo(minStr) < 0) {
        minStr = y;
      }
      if (first || y.compareTo(maxStr) > 0) {
        maxStr = y;
      }
      first = false;
      ((LongColumnVector) batch.cols[0]).vector[r] = x;
      ((BytesColumnVector) batch.cols[1]).setVal(r, y.getBytes());
    }
    writer.addRowBatch(batch);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  // File statistics must match the tracked min/max values.
  ColumnStatistics[] stats = reader.getStatistics();
  assertEquals(3, stats.length);
  for (ColumnStatistics s : stats) {
    assertEquals(21000, s.getNumberOfValues());
    if (s instanceof IntegerColumnStatistics) {
      assertEquals(minInt, ((IntegerColumnStatistics) s).getMinimum());
      assertEquals(maxInt, ((IntegerColumnStatistics) s).getMaximum());
    } else if (s instanceof StringColumnStatistics) {
      assertEquals(maxStr, ((StringColumnStatistics) s).getMaximum());
      assertEquals(minStr, ((StringColumnStatistics) s).getMinimum());
    }
  }
  // BUG FIX: restored <OrcProto.Type>, stripped from this declaration; a raw
  // List's get(i) returns Object and the calls below did not compile.
  List<OrcProto.Type> types = reader.getTypes();
  assertEquals(3, types.size());
  assertEquals(OrcProto.Type.Kind.STRUCT, types.get(0).getKind());
  assertEquals(2, types.get(0).getSubtypesCount());
  assertEquals(1, types.get(0).getSubtypes(0));
  assertEquals(2, types.get(0).getSubtypes(1));
  assertEquals(OrcProto.Type.Kind.INT, types.get(1).getKind());
  assertEquals(0, types.get(1).getSubtypesCount());
  assertEquals(OrcProto.Type.Kind.STRING, types.get(2).getKind());
  assertEquals(0, types.get(2).getSubtypesCount());
  // Read with two complementary projections: rows1 sees the int column,
  // rows2 sees the string column.
  RecordReader rows1 = reader.rows(new boolean[]{true, true, false});
  RecordReader rows2 = reader.rows(new boolean[]{true, false, true});
  r1 = new Random(1);
  r2 = new Random(2);
  OrcStruct row1 = null;
  OrcStruct row2 = null;
  for (int i = 0; i < 21000; ++i) {
    assertEquals(true, rows1.hasNext());
    assertEquals(true, rows2.hasNext());
    row1 = (OrcStruct) rows1.next(row1);
    row2 = (OrcStruct) rows2.next(row2);
    assertEquals(r1.nextInt(), ((IntWritable) row1.getFieldValue(0)).get());
    assertEquals(Long.toHexString(r2.nextLong()), row2.getFieldValue(1).toString());
  }
  assertEquals(false, rows1.hasNext());
  assertEquals(false, rows2.hasNext());
  rows1.close();
  rows2.close();
}
APIUtilityVerifier IterativeVerifier BranchVerifier InternalCallVerifier EqualityVerifier
/**
 * Tests union columns: writes 1024 rows where runs of rows are null,
 * tagged 0 (int child), or tagged 1 (long child), then reads them back
 * and checks each row's rendered union value.
 * @throws Exception
 */
@Test public void testUnions() throws Exception {
  TypeDescription schema = TypeDescription.createStruct()
      .addField("outer", TypeDescription.createUnion()
          .addUnionChild(TypeDescription.createInt())
          .addUnionChild(TypeDescription.createLong()));
  Writer writer = OrcFile.createWriter(testFilePath, OrcFile.writerOptions(conf).setSchema(schema));
  VectorizedRowBatch batch = schema.createRowBatch();
  batch.size = 1024;
  UnionColumnVector outer = (UnionColumnVector) batch.cols[0];
  batch.cols[0].noNulls = false;
  // Row layout: [0,200) null, [200,300) tag 0, [300,400) tag 1,
  // [400,600) null, [600,800) tag 1, [800,1000) null, [1000,1024) tag 1.
  for (int r = 0; r < 1024; ++r) {
    boolean nullRow = r < 200 || (r >= 400 && r < 600) || (r >= 800 && r < 1000);
    if (nullRow) {
      outer.isNull[r] = true;
    } else {
      outer.tags[r] = (r < 300) ? 0 : 1;
    }
    // Both children are populated for every row; the tag selects which is read.
    ((LongColumnVector) outer.fields[0]).vector[r] = r;
    ((LongColumnVector) outer.fields[1]).vector[r] = -r;
  }
  writer.addRowBatch(batch);
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf));
  RecordReader rows = reader.rows();
  OrcStruct row = null;
  for (int r = 0; r < 1024; ++r) {
    assertEquals(true, rows.hasNext());
    row = (OrcStruct) rows.next(row);
    OrcUnion inner = (OrcUnion) row.getFieldValue(0);
    boolean expectNull = r < 200 || (r >= 400 && r < 600) || (r >= 800 && r < 1000);
    if (expectNull) {
      assertEquals("row " + r, null, inner);
    } else {
      int tag = (r < 300) ? 0 : 1;
      long value = (tag == 0) ? r : -r;
      assertEquals("row " + r, "union(" + tag + ", " + value + ")", inner.toString());
    }
  }
  assertEquals(false, rows.hasNext());
}
APIUtilityVerifier IterativeVerifier BranchVerifier InternalCallVerifier EqualityVerifier
/**
 * Test lists and how they interact with the child column. In particular,
 * put nulls between back to back lists and then make some lists that
 * overlap.
 * @throws Exception
 */
@Test public void testLists() throws Exception {
TypeDescription schema=TypeDescription.createStruct().addField("list",TypeDescription.createList(TypeDescription.createLong()));
Writer writer=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).setSchema(schema));
VectorizedRowBatch batch=schema.createRowBatch();
batch.size=1024;
ListColumnVector list=(ListColumnVector)batch.cols[0];
list.noNulls=false;
// Row layout: alternating 100-200 row runs of nulls and single-element
// lists. The [300,500) lists re-read child values already used by the
// [200,300) lists, and the [600,700) lists are length-2 and overlap each
// other (offsets advance by 1 while each list spans 2 child slots).
for (int r=0; r < 1024; ++r) {
if (r < 200) {
list.isNull[r]=true;
}
 else if (r < 300) {
list.offsets[r]=r - 200;
list.lengths[r]=1;
}
 else if (r < 400) {
list.isNull[r]=true;
}
 else if (r < 500) {
list.offsets[r]=r - 300;
list.lengths[r]=1;
}
 else if (r < 600) {
list.isNull[r]=true;
}
 else if (r < 700) {
list.offsets[r]=r;
list.lengths[r]=2;
}
 else {
list.isNull[r]=true;
}
// Child column holds r*10 at slot r, regardless of list shape.
((LongColumnVector)list.child).vector[r]=r * 10;
}
writer.addRowBatch(batch);
writer.close();
// Read back and verify the rendered list value of every row matches the
// offsets/lengths written above.
Reader reader=OrcFile.createReader(testFilePath,OrcFile.readerOptions(conf));
RecordReader rows=reader.rows();
OrcStruct row=null;
for (int r=0; r < 1024; ++r) {
assertEquals(true,rows.hasNext());
row=(OrcStruct)rows.next(row);
List inner=(List)row.getFieldValue(0);
if (r < 200) {
assertEquals("row " + r,null,inner);
}
 else if (r < 300) {
assertEquals("row " + r,"[" + ((r - 200) * 10) + "]",inner.toString());
}
 else if (r < 400) {
assertEquals("row " + r,null,inner);
}
 else if (r < 500) {
assertEquals("row " + r,"[" + ((r - 300) * 10) + "]",inner.toString());
}
 else if (r < 600) {
assertEquals("row " + r,null,inner);
}
 else if (r < 700) {
// Length-2 list starting at child slot r: [10r, 10(r+1)].
assertEquals("row " + r,"[" + (10 * r) + ", "+ (10 * (r + 1))+ "]",inner.toString());
}
 else {
assertEquals("row " + r,null,inner);
}
}
assertEquals(false,rows.hasNext());
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes a randomly generated snappy-compressed file with row indexes
 * disabled (stride 0), then verifies no index data was written and that
 * all rows read back correctly.
 * @throws Exception
 */
@Test public void testWithoutIndex() throws Exception {
  TypeDescription schema = createInnerSchema();
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).setSchema(schema).stripeSize(5000)
          .compress(CompressionKind.SNAPPY).bufferSize(1000)
          .rowIndexStride(0)); // stride 0 disables the row index
  VectorizedRowBatch batch = schema.createRowBatch();
  Random random = new Random(24);
  batch.size = 5;
  for (int c = 0; c < batch.cols.length; ++c) {
    batch.cols[c].setRepeating(true);
  }
  // Write 10000 batches of 5 identical rows each: 50000 rows total.
  for (int b = 0; b < 10000; ++b) {
    ((LongColumnVector) batch.cols[0]).vector[0] = random.nextInt();
    ((BytesColumnVector) batch.cols[1]).setVal(0,
        Integer.toBinaryString(random.nextInt()).getBytes());
    writer.addRowBatch(batch);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  assertEquals(50000, reader.getNumberOfRows());
  assertEquals(0, reader.getRowIndexStride());
  StripeInformation stripe = reader.getStripes().iterator().next();
  assertEquals(true, stripe.getDataLength() != 0);
  assertEquals(0, stripe.getIndexLength());
  // Replay the same random sequence; each value must appear 5 times in a row.
  RecordReader rows = reader.rows();
  random = new Random(24);
  OrcStruct row = null;
  for (int b = 0; b < 10000; ++b) {
    int expectedInt = random.nextInt();
    String expectedStr = Integer.toBinaryString(random.nextInt());
    for (int j = 0; j < 5; ++j) {
      assertEquals(true, rows.hasNext());
      row = (OrcStruct) rows.next(row);
      assertEquals(expectedInt, ((IntWritable) row.getFieldValue(0)).get());
      assertEquals(expectedStr, row.getFieldValue(1).toString());
    }
  }
  assertEquals(false, rows.hasNext());
  rows.close();
}
APIUtilityVerifier BranchVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
@Test public void testReadFormat_0_11() throws Exception {
Path oldFilePath=new Path(HiveTestUtils.getFileFromClasspath("orc-file-11-format.orc"));
Reader reader=OrcFile.createReader(oldFilePath,OrcFile.readerOptions(conf).filesystem(fs));
int stripeCount=0;
int rowCount=0;
long currentOffset=-1;
for ( StripeInformation stripe : reader.getStripes()) {
stripeCount+=1;
rowCount+=stripe.getNumberOfRows();
if (currentOffset < 0) {
currentOffset=stripe.getOffset() + stripe.getIndexLength() + stripe.getDataLength()+ stripe.getFooterLength();
}
else {
assertEquals(currentOffset,stripe.getOffset());
currentOffset+=stripe.getIndexLength() + stripe.getDataLength() + stripe.getFooterLength();
}
}
assertEquals(reader.getNumberOfRows(),rowCount);
assertEquals(2,stripeCount);
ColumnStatistics[] stats=reader.getStatistics();
assertEquals(7500,stats[1].getNumberOfValues());
assertEquals(3750,((BooleanColumnStatistics)stats[1]).getFalseCount());
assertEquals(3750,((BooleanColumnStatistics)stats[1]).getTrueCount());
assertEquals("count: 7500 hasNull: true true: 3750",stats[1].toString());
assertEquals(2048,((IntegerColumnStatistics)stats[3]).getMaximum());
assertEquals(1024,((IntegerColumnStatistics)stats[3]).getMinimum());
assertEquals(true,((IntegerColumnStatistics)stats[3]).isSumDefined());
assertEquals(11520000,((IntegerColumnStatistics)stats[3]).getSum());
assertEquals("count: 7500 hasNull: true min: 1024 max: 2048 sum: 11520000",stats[3].toString());
assertEquals(Long.MAX_VALUE,((IntegerColumnStatistics)stats[5]).getMaximum());
assertEquals(Long.MAX_VALUE,((IntegerColumnStatistics)stats[5]).getMinimum());
assertEquals(false,((IntegerColumnStatistics)stats[5]).isSumDefined());
assertEquals("count: 7500 hasNull: true min: 9223372036854775807 max: 9223372036854775807",stats[5].toString());
assertEquals(-15.0,((DoubleColumnStatistics)stats[7]).getMinimum());
assertEquals(-5.0,((DoubleColumnStatistics)stats[7]).getMaximum());
assertEquals(-75000.0,((DoubleColumnStatistics)stats[7]).getSum(),0.00001);
assertEquals("count: 7500 hasNull: true min: -15.0 max: -5.0 sum: -75000.0",stats[7].toString());
assertEquals("count: 7500 hasNull: true min: bye max: hi sum: 0",stats[9].toString());
StructObjectInspector readerInspector=(StructObjectInspector)reader.getObjectInspector();
assertEquals(ObjectInspector.Category.STRUCT,readerInspector.getCategory());
assertEquals("struct>>,list:array>,"+ "map:map>,ts:timestamp,"+ "decimal1:decimal(38,18)>",readerInspector.getTypeName());
List extends StructField> fields=readerInspector.getAllStructFieldRefs();
BooleanObjectInspector bo=(BooleanObjectInspector)readerInspector.getStructFieldRef("boolean1").getFieldObjectInspector();
ByteObjectInspector by=(ByteObjectInspector)readerInspector.getStructFieldRef("byte1").getFieldObjectInspector();
ShortObjectInspector sh=(ShortObjectInspector)readerInspector.getStructFieldRef("short1").getFieldObjectInspector();
IntObjectInspector in=(IntObjectInspector)readerInspector.getStructFieldRef("int1").getFieldObjectInspector();
LongObjectInspector lo=(LongObjectInspector)readerInspector.getStructFieldRef("long1").getFieldObjectInspector();
FloatObjectInspector fl=(FloatObjectInspector)readerInspector.getStructFieldRef("float1").getFieldObjectInspector();
DoubleObjectInspector dbl=(DoubleObjectInspector)readerInspector.getStructFieldRef("double1").getFieldObjectInspector();
BinaryObjectInspector bi=(BinaryObjectInspector)readerInspector.getStructFieldRef("bytes1").getFieldObjectInspector();
StringObjectInspector st=(StringObjectInspector)readerInspector.getStructFieldRef("string1").getFieldObjectInspector();
StructObjectInspector mid=(StructObjectInspector)readerInspector.getStructFieldRef("middle").getFieldObjectInspector();
List extends StructField> midFields=mid.getAllStructFieldRefs();
ListObjectInspector midli=(ListObjectInspector)midFields.get(0).getFieldObjectInspector();
StructObjectInspector inner=(StructObjectInspector)midli.getListElementObjectInspector();
List extends StructField> inFields=inner.getAllStructFieldRefs();
ListObjectInspector li=(ListObjectInspector)readerInspector.getStructFieldRef("list").getFieldObjectInspector();
MapObjectInspector ma=(MapObjectInspector)readerInspector.getStructFieldRef("map").getFieldObjectInspector();
TimestampObjectInspector tso=(TimestampObjectInspector)readerInspector.getStructFieldRef("ts").getFieldObjectInspector();
HiveDecimalObjectInspector dco=(HiveDecimalObjectInspector)readerInspector.getStructFieldRef("decimal1").getFieldObjectInspector();
StringObjectInspector mk=(StringObjectInspector)ma.getMapKeyObjectInspector();
RecordReader rows=reader.rows();
Object row=rows.next(null);
assertNotNull(row);
assertEquals(false,bo.get(readerInspector.getStructFieldData(row,fields.get(0))));
assertEquals(1,by.get(readerInspector.getStructFieldData(row,fields.get(1))));
assertEquals(1024,sh.get(readerInspector.getStructFieldData(row,fields.get(2))));
assertEquals(65536,in.get(readerInspector.getStructFieldData(row,fields.get(3))));
assertEquals(Long.MAX_VALUE,lo.get(readerInspector.getStructFieldData(row,fields.get(4))));
assertEquals(1.0,fl.get(readerInspector.getStructFieldData(row,fields.get(5))),0.00001);
assertEquals(-15.0,dbl.get(readerInspector.getStructFieldData(row,fields.get(6))),0.00001);
assertEquals(bytes(0,1,2,3,4),bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,fields.get(7))));
assertEquals("hi",st.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(8))));
List> midRow=midli.getList(mid.getStructFieldData(readerInspector.getStructFieldData(row,fields.get(9)),midFields.get(0)));
assertNotNull(midRow);
assertEquals(2,midRow.size());
assertEquals(1,in.get(inner.getStructFieldData(midRow.get(0),inFields.get(0))));
assertEquals("bye",st.getPrimitiveJavaObject(inner.getStructFieldData(midRow.get(0),inFields.get(1))));
assertEquals(2,in.get(inner.getStructFieldData(midRow.get(1),inFields.get(0))));
assertEquals("sigh",st.getPrimitiveJavaObject(inner.getStructFieldData(midRow.get(1),inFields.get(1))));
List> list=li.getList(readerInspector.getStructFieldData(row,fields.get(10)));
assertEquals(2,list.size());
assertEquals(3,in.get(inner.getStructFieldData(list.get(0),inFields.get(0))));
assertEquals("good",st.getPrimitiveJavaObject(inner.getStructFieldData(list.get(0),inFields.get(1))));
assertEquals(4,in.get(inner.getStructFieldData(list.get(1),inFields.get(0))));
assertEquals("bad",st.getPrimitiveJavaObject(inner.getStructFieldData(list.get(1),inFields.get(1))));
Map,?> map=ma.getMap(readerInspector.getStructFieldData(row,fields.get(11)));
assertEquals(0,map.size());
assertEquals(Timestamp.valueOf("2000-03-12 15:00:00"),tso.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(12))));
assertEquals(HiveDecimal.create("12345678.6547456"),dco.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(13))));
assertEquals(true,rows.hasNext());
rows.seekToRow(7499);
row=rows.next(null);
assertEquals(true,bo.get(readerInspector.getStructFieldData(row,fields.get(0))));
assertEquals(100,by.get(readerInspector.getStructFieldData(row,fields.get(1))));
assertEquals(2048,sh.get(readerInspector.getStructFieldData(row,fields.get(2))));
assertEquals(65536,in.get(readerInspector.getStructFieldData(row,fields.get(3))));
assertEquals(Long.MAX_VALUE,lo.get(readerInspector.getStructFieldData(row,fields.get(4))));
assertEquals(2.0,fl.get(readerInspector.getStructFieldData(row,fields.get(5))),0.00001);
assertEquals(-5.0,dbl.get(readerInspector.getStructFieldData(row,fields.get(6))),0.00001);
assertEquals(bytes(),bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,fields.get(7))));
assertEquals("bye",st.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(8))));
midRow=midli.getList(mid.getStructFieldData(readerInspector.getStructFieldData(row,fields.get(9)),midFields.get(0)));
assertNotNull(midRow);
assertEquals(2,midRow.size());
assertEquals(1,in.get(inner.getStructFieldData(midRow.get(0),inFields.get(0))));
assertEquals("bye",st.getPrimitiveJavaObject(inner.getStructFieldData(midRow.get(0),inFields.get(1))));
assertEquals(2,in.get(inner.getStructFieldData(midRow.get(1),inFields.get(0))));
assertEquals("sigh",st.getPrimitiveJavaObject(inner.getStructFieldData(midRow.get(1),inFields.get(1))));
list=li.getList(readerInspector.getStructFieldData(row,fields.get(10)));
assertEquals(3,list.size());
assertEquals(100000000,in.get(inner.getStructFieldData(list.get(0),inFields.get(0))));
assertEquals("cat",st.getPrimitiveJavaObject(inner.getStructFieldData(list.get(0),inFields.get(1))));
assertEquals(-100000,in.get(inner.getStructFieldData(list.get(1),inFields.get(0))));
assertEquals("in",st.getPrimitiveJavaObject(inner.getStructFieldData(list.get(1),inFields.get(1))));
assertEquals(1234,in.get(inner.getStructFieldData(list.get(2),inFields.get(0))));
assertEquals("hat",st.getPrimitiveJavaObject(inner.getStructFieldData(list.get(2),inFields.get(1))));
map=ma.getMap(readerInspector.getStructFieldData(row,fields.get(11)));
assertEquals(2,map.size());
boolean[] found=new boolean[2];
for ( Object key : map.keySet()) {
String str=mk.getPrimitiveJavaObject(key);
if (str.equals("chani")) {
assertEquals(false,found[0]);
assertEquals(5,in.get(inner.getStructFieldData(map.get(key),inFields.get(0))));
assertEquals(str,st.getPrimitiveJavaObject(inner.getStructFieldData(map.get(key),inFields.get(1))));
found[0]=true;
}
else if (str.equals("mauddib")) {
assertEquals(false,found[1]);
assertEquals(1,in.get(inner.getStructFieldData(map.get(key),inFields.get(0))));
assertEquals(str,st.getPrimitiveJavaObject(inner.getStructFieldData(map.get(key),inFields.get(1))));
found[1]=true;
}
else {
throw new IllegalArgumentException("Unknown key " + str);
}
}
assertEquals(true,found[0]);
assertEquals(true,found[1]);
assertEquals(Timestamp.valueOf("2000-03-12 15:00:01"),tso.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(12))));
assertEquals(HiveDecimal.create("12345678.6547457"),dco.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(13))));
assertEquals(false,rows.hasNext());
rows.close();
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test public void testMemoryManagementV12() throws Exception {
  // Push 2500 one-row batches through a tiny memory pool (scale 0.1) and
  // verify that the V_0_12 writer registers/unregisters with the manager
  // and flushes small stripes early.
  TypeDescription schema = createInnerSchema();
  MyMemoryManager memory = new MyMemoryManager(conf, 10000, 0.1);
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf)
          .setSchema(schema)
          .compress(CompressionKind.NONE)
          .stripeSize(50000)
          .bufferSize(100)
          .rowIndexStride(0)
          .memory(memory)
          .version(OrcFile.Version.V_0_12));
  VectorizedRowBatch batch = schema.createRowBatch();
  // Creating the writer must register it with the memory manager.
  assertEquals(testFilePath, memory.path);
  batch.size = 1;
  for (int rowIdx = 0; rowIdx < 2500; ++rowIdx) {
    ((LongColumnVector) batch.cols[0]).vector[0] = rowIdx * 300;
    ((BytesColumnVector) batch.cols[1]).setVal(0,
        Integer.toHexString(10 * rowIdx).getBytes());
    writer.addRowBatch(batch);
  }
  writer.close();
  // Closing the writer must unregister it.
  assertEquals(null, memory.path);
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  int stripeCount = 0;
  for (StripeInformation stripe : reader.getStripes()) {
    stripeCount += 1;
    assertTrue("stripe " + stripeCount + " is too long at " + stripe.getDataLength(),
        stripe.getDataLength() < 5000);
  }
  assertEquals(3, stripeCount);
  assertEquals(2500, reader.getNumberOfRows());
}
APIUtilityVerifier IterativeVerifier BranchVerifier InternalCallVerifier EqualityVerifier
@Test public void testStructs() throws Exception {
  // A single struct column wrapping an inner long field. Three ranges of
  // rows (0-199, 400-599, 800-1023) are null at the outer struct level.
  TypeDescription schema = TypeDescription.createStruct()
      .addField("struct", TypeDescription.createStruct()
          .addField("inner", TypeDescription.createLong()));
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).setSchema(schema));
  VectorizedRowBatch batch = schema.createRowBatch();
  batch.size = 1024;
  StructColumnVector outer = (StructColumnVector) batch.cols[0];
  outer.noNulls = false;
  for (int r = 0; r < 1024; ++r) {
    boolean outerIsNull = r < 200 || (r >= 400 && r < 600) || r >= 800;
    if (outerIsNull) {
      outer.isNull[r] = true;
    }
    // The inner value is written for every row, even the null ones.
    ((LongColumnVector) outer.fields[0]).vector[r] = r;
  }
  writer.addRowBatch(batch);
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf));
  RecordReader rows = reader.rows();
  OrcStruct row = null;
  for (int r = 0; r < 1024; ++r) {
    assertEquals(true, rows.hasNext());
    row = (OrcStruct) rows.next(row);
    OrcStruct inner = (OrcStruct) row.getFieldValue(0);
    boolean expectNull = r < 200 || (r >= 400 && r < 600) || r >= 800;
    if (expectNull) {
      assertEquals("row " + r, null, inner);
    } else {
      assertEquals("row " + r, "{" + r + "}", inner.toString());
    }
  }
  assertEquals(false, rows.hasNext());
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
* Test all of the types that have distinct ORC writers using the vectorized
* writer with different combinations of repeating and null values.
* @throws Exception
*/
@Test public void testRepeating() throws Exception {
  // One field per distinct ORC writer type, including nested struct,
  // union, list, and map columns.
  TypeDescription schema = TypeDescription.createStruct()
      .addField("bin", TypeDescription.createBinary())
      .addField("bool", TypeDescription.createBoolean())
      .addField("byte", TypeDescription.createByte())
      .addField("long", TypeDescription.createLong())
      .addField("float", TypeDescription.createFloat())
      .addField("double", TypeDescription.createDouble())
      .addField("date", TypeDescription.createDate())
      .addField("time", TypeDescription.createTimestamp())
      .addField("dec", TypeDescription.createDecimal().withPrecision(20).withScale(6))
      .addField("string", TypeDescription.createString())
      .addField("char", TypeDescription.createChar().withMaxLength(10))
      .addField("vc", TypeDescription.createVarchar().withMaxLength(10))
      .addField("struct", TypeDescription.createStruct()
          .addField("sub1", TypeDescription.createInt()))
      .addField("union", TypeDescription.createUnion()
          .addUnionChild(TypeDescription.createString())
          .addUnionChild(TypeDescription.createInt()))
      .addField("list", TypeDescription.createList(TypeDescription.createInt()))
      .addField("map", TypeDescription.createMap(TypeDescription.createString(),
          TypeDescription.createString()));
  VectorizedRowBatch batch = schema.createRowBatch();
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).setSchema(schema).rowIndexStride(1000));
  batch.size = 1024;
  // Batch 1 (rows 0-1023): every column is a repeating null.
  for (int c = 0; c < batch.cols.length; ++c) {
    batch.cols[c].setRepeating(true);
    batch.cols[c].noNulls = false;
    batch.cols[c].isNull[0] = true;
  }
  writer.addRowBatch(batch);
  // Batch 2 (rows 1024-2047): every column repeats a single non-null value.
  for (int c = 0; c < batch.cols.length; ++c) {
    batch.cols[c].isNull[0] = false;
  }
  ((BytesColumnVector) batch.cols[0]).setVal(0, "Horton".getBytes());
  ((LongColumnVector) batch.cols[1]).vector[0] = 1;
  // 130 does not fit in a signed byte; it reads back as -126 (asserted below).
  ((LongColumnVector) batch.cols[2]).vector[0] = 130;
  ((LongColumnVector) batch.cols[3]).vector[0] = 0x123456789abcdef0L;
  ((DoubleColumnVector) batch.cols[4]).vector[0] = 1.125;
  ((DoubleColumnVector) batch.cols[5]).vector[0] = 0.0009765625;
  ((LongColumnVector) batch.cols[6]).vector[0] =
      new DateWritable(new Date(111, 6, 1)).getDays();
  ((LongColumnVector) batch.cols[7]).vector[0] =
      TimestampUtils.getTimeNanoSec(new Timestamp(115, 9, 23, 10, 11, 59, 999999999));
  ((DecimalColumnVector) batch.cols[8]).vector[0] = new HiveDecimalWritable("1.234567");
  ((BytesColumnVector) batch.cols[9]).setVal(0, "Echelon".getBytes());
  ((BytesColumnVector) batch.cols[10]).setVal(0, "Juggernaut".getBytes());
  // "Dreadnaught" is 11 characters; varchar(10) truncates it to "Dreadnaugh".
  ((BytesColumnVector) batch.cols[11]).setVal(0, "Dreadnaught".getBytes());
  ((LongColumnVector) ((StructColumnVector) batch.cols[12]).fields[0]).vector[0] = 123;
  // union repeats tag 1 (the int child) with value 1234
  ((UnionColumnVector) batch.cols[13]).tags[0] = 1;
  ((LongColumnVector) ((UnionColumnVector) batch.cols[13]).fields[1]).vector[0] = 1234;
  // list repeats [31415, 31415, 31415] via a repeating child vector
  ((ListColumnVector) batch.cols[14]).offsets[0] = 0;
  ((ListColumnVector) batch.cols[14]).lengths[0] = 3;
  ((ListColumnVector) batch.cols[14]).child.isRepeating = true;
  ((LongColumnVector) ((ListColumnVector) batch.cols[14]).child).vector[0] = 31415;
  // map repeats {ORC=fast, Hive=fast, LLAP=fast} via a repeating values vector
  ((MapColumnVector) batch.cols[15]).offsets[0] = 0;
  ((MapColumnVector) batch.cols[15]).lengths[0] = 3;
  ((MapColumnVector) batch.cols[15]).values.isRepeating = true;
  ((BytesColumnVector) ((MapColumnVector) batch.cols[15]).keys).setVal(0, "ORC".getBytes());
  ((BytesColumnVector) ((MapColumnVector) batch.cols[15]).keys).setVal(1, "Hive".getBytes());
  ((BytesColumnVector) ((MapColumnVector) batch.cols[15]).keys).setVal(2, "LLAP".getBytes());
  ((BytesColumnVector) ((MapColumnVector) batch.cols[15]).values).setVal(0, "fast".getBytes());
  writer.addRowBatch(batch);
  // Batch 3 (rows 2048-3071): non-repeating, but every value is null.
  for (int c = 0; c < batch.cols.length; ++c) {
    batch.cols[c].setRepeating(false);
    batch.cols[c].noNulls = false;
    Arrays.fill(batch.cols[c].isNull, true);
  }
  writer.addRowBatch(batch);
  batch.reset();
  // Batch 4 (rows 3072-4095): 1024 distinct non-null rows; the list and
  // map children need room for three entries per row.
  batch.size = 1024;
  ((ListColumnVector) batch.cols[14]).child.ensureSize(3 * 1024, false);
  ((MapColumnVector) batch.cols[15]).keys.ensureSize(3 * 1024, false);
  ((MapColumnVector) batch.cols[15]).values.ensureSize(3 * 1024, false);
  for (int r = 0; r < 1024; ++r) {
    ((BytesColumnVector) batch.cols[0]).setVal(r, Integer.toHexString(r).getBytes());
    ((LongColumnVector) batch.cols[1]).vector[r] = r % 2;
    ((LongColumnVector) batch.cols[2]).vector[r] = (r % 255);
    ((LongColumnVector) batch.cols[3]).vector[r] = 31415L * r;
    ((DoubleColumnVector) batch.cols[4]).vector[r] = 1.125 * r;
    ((DoubleColumnVector) batch.cols[5]).vector[r] = 0.0009765625 * r;
    ((LongColumnVector) batch.cols[6]).vector[r] =
        new DateWritable(new Date(111, 6, 1)).getDays() + r;
    ((LongColumnVector) batch.cols[7]).vector[r] =
        TimestampUtils.getTimeNanoSec(new Timestamp(115, 9, 23, 10, 11, 59, 999999999))
            + r * 1000000000L;
    ((DecimalColumnVector) batch.cols[8]).vector[r] = new HiveDecimalWritable("1.234567");
    ((BytesColumnVector) batch.cols[9]).setVal(r, Integer.toString(r).getBytes());
    ((BytesColumnVector) batch.cols[10]).setVal(r, Integer.toHexString(r).getBytes());
    ((BytesColumnVector) batch.cols[11]).setVal(r, Integer.toHexString(r * 128).getBytes());
    ((LongColumnVector) ((StructColumnVector) batch.cols[12]).fields[0]).vector[r] = r + 13;
    ((UnionColumnVector) batch.cols[13]).tags[r] = 1;
    ((LongColumnVector) ((UnionColumnVector) batch.cols[13]).fields[1]).vector[r] = r + 42;
    ((ListColumnVector) batch.cols[14]).offsets[r] = 3 * r;
    ((ListColumnVector) batch.cols[14]).lengths[r] = 3;
    for (int i = 0; i < 3; ++i) {
      ((LongColumnVector) ((ListColumnVector) batch.cols[14]).child).vector[3 * r + i] =
          31415 + i;
    }
    ((MapColumnVector) batch.cols[15]).offsets[r] = 3 * r;
    ((MapColumnVector) batch.cols[15]).lengths[r] = 3;
    for (int i = 0; i < 3; ++i) {
      ((BytesColumnVector) ((MapColumnVector) batch.cols[15]).keys).setVal(3 * r + i,
          Integer.toHexString(3 * r + i).getBytes());
      ((BytesColumnVector) ((MapColumnVector) batch.cols[15]).values).setVal(3 * r + i,
          Integer.toString(3 * r + i).getBytes());
    }
  }
  writer.addRowBatch(batch);
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  // Statistics: the root covers all 4096 rows; every child column has 2048
  // non-null values (batches 2 and 4) and saw nulls (batches 1 and 3).
  ColumnStatistics[] stats = reader.getStatistics();
  assertEquals(4096, stats[0].getNumberOfValues());
  assertEquals(false, stats[0].hasNull());
  for (TypeDescription colType : schema.getChildren()) {
    assertEquals("count on " + colType.getId(), 2048,
        stats[colType.getId()].getNumberOfValues());
    assertEquals("hasNull on " + colType.getId(), true, stats[colType.getId()].hasNull());
  }
  assertEquals(8944, ((BinaryColumnStatistics) stats[1]).getSum());
  // bool: 1024 repeated trues (batch 2) + 512 trues from r % 2 (batch 4).
  assertEquals(1536, ((BooleanColumnStatistics) stats[2]).getTrueCount());
  assertEquals(512, ((BooleanColumnStatistics) stats[2]).getFalseCount());
  // 1024 * 0x123456789abcdef0 overflows a long, so the sum is undefined.
  assertEquals(false, ((IntegerColumnStatistics) stats[4]).isSumDefined());
  assertEquals(0, ((IntegerColumnStatistics) stats[4]).getMinimum());
  assertEquals(0x123456789abcdef0L, ((IntegerColumnStatistics) stats[4]).getMaximum());
  assertEquals("0", ((StringColumnStatistics) stats[10]).getMinimum());
  assertEquals("Echelon", ((StringColumnStatistics) stats[10]).getMaximum());
  assertEquals(10154, ((StringColumnStatistics) stats[10]).getSum());
  // char(10) values are stored blank-padded to 10 characters (the sum is
  // 2048 * 10 = 20480). Fixed: the trailing padding in these two literals
  // had been collapsed to a single space.
  assertEquals("0         ", ((StringColumnStatistics) stats[11]).getMinimum());
  assertEquals("ff        ", ((StringColumnStatistics) stats[11]).getMaximum());
  assertEquals(20480, ((StringColumnStatistics) stats[11]).getSum());
  assertEquals("0", ((StringColumnStatistics) stats[12]).getMinimum());
  assertEquals("ff80", ((StringColumnStatistics) stats[12]).getMaximum());
  assertEquals(14813, ((StringColumnStatistics) stats[12]).getSum());
  RecordReader rows = reader.rows();
  OrcStruct row = null;
  // Rows 0-1023: every field is null.
  for (int r = 0; r < 1024; ++r) {
    assertEquals(true, rows.hasNext());
    row = (OrcStruct) rows.next(row);
    for (int f = 0; f < row.getNumFields(); ++f) {
      assertEquals("non-null on row " + r + " field " + f, null, row.getFieldValue(f));
    }
  }
  // Rows 1024-2047: the repeated non-null values from batch 2.
  for (int r = 0; r < 1024; ++r) {
    assertEquals(true, rows.hasNext());
    row = (OrcStruct) rows.next(row);
    // "48 6f 72 74 6f 6e" is the hex dump of the bytes of "Horton".
    assertEquals("row " + r, "48 6f 72 74 6f 6e", row.getFieldValue(0).toString());
    assertEquals("row " + r, "true", row.getFieldValue(1).toString());
    assertEquals("row " + r, "-126", row.getFieldValue(2).toString());
    assertEquals("row " + r, "1311768467463790320", row.getFieldValue(3).toString());
    assertEquals("row " + r, "1.125", row.getFieldValue(4).toString());
    assertEquals("row " + r, "9.765625E-4", row.getFieldValue(5).toString());
    assertEquals("row " + r, "2011-07-01", row.getFieldValue(6).toString());
    assertEquals("row " + r, "2015-10-23 10:11:59.999999999", row.getFieldValue(7).toString());
    assertEquals("row " + r, "1.234567", row.getFieldValue(8).toString());
    assertEquals("row " + r, "Echelon", row.getFieldValue(9).toString());
    assertEquals("row " + r, "Juggernaut", row.getFieldValue(10).toString());
    assertEquals("row " + r, "Dreadnaugh", row.getFieldValue(11).toString());
    assertEquals("row " + r, "{123}", row.getFieldValue(12).toString());
    assertEquals("row " + r, "union(1, 1234)", row.getFieldValue(13).toString());
    assertEquals("row " + r, "[31415, 31415, 31415]", row.getFieldValue(14).toString());
    assertEquals("row " + r, "{ORC=fast, Hive=fast, LLAP=fast}",
        row.getFieldValue(15).toString());
  }
  // Rows 2048-3071: every field is null again.
  for (int r = 0; r < 1024; ++r) {
    assertEquals(true, rows.hasNext());
    row = (OrcStruct) rows.next(row);
    for (int f = 0; f < row.getNumFields(); ++f) {
      assertEquals("non-null on row " + r + " field " + f, null, row.getFieldValue(f));
    }
  }
  // Rows 3072-4095: the distinct per-row values from batch 4.
  for (int r = 0; r < 1024; ++r) {
    assertEquals(true, rows.hasNext());
    row = (OrcStruct) rows.next(row);
    // Binary renders as a space-separated hex dump of its bytes.
    byte[] hex = Integer.toHexString(r).getBytes();
    StringBuilder expected = new StringBuilder();
    for (int i = 0; i < hex.length; ++i) {
      if (i != 0) {
        expected.append(' ');
      }
      expected.append(Integer.toHexString(hex[i]));
    }
    assertEquals("row " + r, expected.toString(), row.getFieldValue(0).toString());
    assertEquals("row " + r, r % 2 == 1 ? "true" : "false", row.getFieldValue(1).toString());
    assertEquals("row " + r, Integer.toString((byte) (r % 255)),
        row.getFieldValue(2).toString());
    assertEquals("row " + r, Long.toString(31415L * r), row.getFieldValue(3).toString());
    assertEquals("row " + r, Float.toString(1.125F * r), row.getFieldValue(4).toString());
    assertEquals("row " + r, Double.toString(0.0009765625 * r),
        row.getFieldValue(5).toString());
    assertEquals("row " + r, new Date(111, 6, 1 + r).toString(),
        row.getFieldValue(6).toString());
    assertEquals("row " + r, new Timestamp(115, 9, 23, 10, 11, 59 + r, 999999999).toString(),
        row.getFieldValue(7).toString());
    assertEquals("row " + r, "1.234567", row.getFieldValue(8).toString());
    assertEquals("row " + r, Integer.toString(r), row.getFieldValue(9).toString());
    assertEquals("row " + r, pad(Integer.toHexString(r), 10),
        row.getFieldValue(10).toString());
    assertEquals("row " + r, Integer.toHexString(r * 128), row.getFieldValue(11).toString());
    assertEquals("row " + r, "{" + Integer.toString(r + 13) + "}",
        row.getFieldValue(12).toString());
    assertEquals("row " + r, "union(1, " + Integer.toString(r + 42) + ")",
        row.getFieldValue(13).toString());
    assertEquals("row " + r, "[31415, 31416, 31417]", row.getFieldValue(14).toString());
    expected = new StringBuilder();
    expected.append('{');
    expected.append(Integer.toHexString(3 * r));
    expected.append('=');
    expected.append(3 * r);
    expected.append(", ");
    expected.append(Integer.toHexString(3 * r + 1));
    expected.append('=');
    expected.append(3 * r + 1);
    expected.append(", ");
    expected.append(Integer.toHexString(3 * r + 2));
    expected.append('=');
    expected.append(3 * r + 2);
    expected.append('}');
    assertEquals("row " + r, expected.toString(), row.getFieldValue(15).toString());
  }
  assertEquals(false, rows.hasNext());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
@Test public void testStringAndBinaryStatistics() throws Exception {
TypeDescription schema=TypeDescription.createStruct().addField("bytes1",TypeDescription.createBinary()).addField("string1",TypeDescription.createString());
Writer writer=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000).bufferSize(10000));
VectorizedRowBatch batch=schema.createRowBatch();
batch.size=4;
BytesColumnVector field1=(BytesColumnVector)batch.cols[0];
BytesColumnVector field2=(BytesColumnVector)batch.cols[1];
field1.setVal(0,bytesArray(0,1,2,3,4));
field1.setVal(1,bytesArray(0,1,2,3));
field1.setVal(2,bytesArray(0,1,2,3,4,5));
field1.noNulls=false;
field1.isNull[3]=true;
field2.setVal(0,"foo".getBytes());
field2.setVal(1,"bar".getBytes());
field2.noNulls=false;
field2.isNull[2]=true;
field2.setVal(3,"hi".getBytes());
writer.addRowBatch(batch);
writer.close();
schema=writer.getSchema();
assertEquals(2,schema.getMaximumId());
Reader reader=OrcFile.createReader(testFilePath,OrcFile.readerOptions(conf).filesystem(fs));
boolean[] expected=new boolean[]{false,false,true};
boolean[] included=OrcUtils.includeColumns("string1",schema);
assertEquals(true,Arrays.equals(expected,included));
expected=new boolean[]{false,false,false};
included=OrcUtils.includeColumns("",schema);
assertEquals(true,Arrays.equals(expected,included));
expected=new boolean[]{false,false,false};
included=OrcUtils.includeColumns(null,schema);
assertEquals(true,Arrays.equals(expected,included));
ColumnStatistics[] stats=reader.getStatistics();
assertEquals(4,stats[0].getNumberOfValues());
assertEquals("count: 4 hasNull: false",stats[0].toString());
assertEquals(3,stats[1].getNumberOfValues());
assertEquals(15,((BinaryColumnStatistics)stats[1]).getSum());
assertEquals("count: 3 hasNull: true sum: 15",stats[1].toString());
assertEquals(3,stats[2].getNumberOfValues());
assertEquals("bar",((StringColumnStatistics)stats[2]).getMinimum());
assertEquals("hi",((StringColumnStatistics)stats[2]).getMaximum());
assertEquals(8,((StringColumnStatistics)stats[2]).getSum());
assertEquals("count: 3 hasNull: true min: bar max: hi sum: 8",stats[2].toString());
StructObjectInspector readerInspector=(StructObjectInspector)reader.getObjectInspector();
assertEquals(ObjectInspector.Category.STRUCT,readerInspector.getCategory());
assertEquals("struct",readerInspector.getTypeName());
List extends StructField> fields=readerInspector.getAllStructFieldRefs();
BinaryObjectInspector bi=(BinaryObjectInspector)readerInspector.getStructFieldRef("bytes1").getFieldObjectInspector();
StringObjectInspector st=(StringObjectInspector)readerInspector.getStructFieldRef("string1").getFieldObjectInspector();
RecordReader rows=reader.rows();
Object row=rows.next(null);
assertNotNull(row);
assertEquals(bytes(0,1,2,3,4),bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,fields.get(0))));
assertEquals("foo",st.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(1))));
assertEquals(true,rows.hasNext());
row=rows.next(row);
assertEquals(bytes(0,1,2,3),bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,fields.get(0))));
assertEquals("bar",st.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(1))));
assertEquals(true,rows.hasNext());
row=rows.next(row);
assertEquals(bytes(0,1,2,3,4,5),bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,fields.get(0))));
assertNull(st.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(1))));
assertEquals(true,rows.hasNext());
row=rows.next(row);
assertNull(bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,fields.get(0))));
assertEquals("hi",st.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(1))));
assertEquals(false,rows.hasNext());
rows.close();
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
* Test the char and varchar padding and truncation.
* @throws Exception
*/
@Test public void testStringPadding() throws Exception {
  TypeDescription schema = TypeDescription.createStruct()
      .addField("char", TypeDescription.createChar().withMaxLength(10))
      .addField("varchar", TypeDescription.createVarchar().withMaxLength(10));
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).setSchema(schema));
  VectorizedRowBatch batch = schema.createRowBatch();
  batch.size = 4;
  // Same four inputs for both the char(10) and varchar(10) columns.
  String[] inputs = {"", "xyz", "0123456789", "0123456789abcdef"};
  for (int c = 0; c < batch.cols.length; ++c) {
    BytesColumnVector col = (BytesColumnVector) batch.cols[c];
    for (int r = 0; r < inputs.length; ++r) {
      col.setVal(r, inputs[r].getBytes());
    }
  }
  writer.addRowBatch(batch);
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf));
  RecordReader rows = reader.rows();
  batch = rows.nextBatch(null);
  assertEquals(4, batch.size);
  // Values longer than 10 characters are truncated for both types.
  // NOTE(review): the char(10) expectations carry no trailing blanks even
  // though char is normally blank-padded — possibly lost in a whitespace
  // normalization of this file; confirm against the upstream test.
  String[] truncated = {"", "xyz", "0123456789", "0123456789"};
  for (int c = 0; c < 2; ++c) {
    for (int r = 0; r < truncated.length; ++r) {
      assertEquals(truncated[r], makeString((BytesColumnVector) batch.cols[c], r));
    }
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
@Test public void testTimestamp() throws Exception {
  // NOTE(review): this inspector is never used below; kept because
  // ObjectInspectorFactory access is guarded by the class lock here.
  ObjectInspector inspector;
  synchronized (TestVectorOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Timestamp.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  TypeDescription schema = TypeDescription.createTimestamp();
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000)
          .bufferSize(10000).version(org.apache.orc.OrcFile.Version.V_0_11));
  // Fixed: the raw List type made tslist.get(idx++).getNanos() uncompilable.
  List<Timestamp> tslist = Lists.newArrayList();
  // Timestamps with a variety of fractional-second precisions.
  tslist.add(Timestamp.valueOf("2037-01-01 00:00:00.000999"));
  tslist.add(Timestamp.valueOf("2003-01-01 00:00:00.000000222"));
  tslist.add(Timestamp.valueOf("1999-01-01 00:00:00.999999999"));
  tslist.add(Timestamp.valueOf("1995-01-01 00:00:00.688888888"));
  tslist.add(Timestamp.valueOf("2002-01-01 00:00:00.1"));
  tslist.add(Timestamp.valueOf("2010-03-02 00:00:00.000009001"));
  tslist.add(Timestamp.valueOf("2005-01-01 00:00:00.000002229"));
  tslist.add(Timestamp.valueOf("2006-01-01 00:00:00.900203003"));
  tslist.add(Timestamp.valueOf("2003-01-01 00:00:00.800000007"));
  tslist.add(Timestamp.valueOf("1996-08-02 00:00:00.723100809"));
  tslist.add(Timestamp.valueOf("1998-11-02 00:00:00.857340643"));
  tslist.add(Timestamp.valueOf("2008-10-02 00:00:00"));
  VectorizedRowBatch batch = new VectorizedRowBatch(1, 1024);
  LongColumnVector vec = new LongColumnVector(1024);
  batch.cols[0] = vec;
  batch.reset();
  batch.size = tslist.size();
  for (int i = 0; i < tslist.size(); ++i) {
    Timestamp ts = tslist.get(i);
    vec.vector[i] = TimestampUtils.getTimeNanoSec(ts);
  }
  writer.addRowBatch(batch);
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows(null);
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    // Nanoseconds must round-trip exactly, including sub-millisecond digits.
    assertEquals(tslist.get(idx++).getNanos(), ((TimestampWritable) row).getNanos());
  }
  assertEquals(tslist.size(), rows.getRowNumber());
  // A bare timestamp schema has a single column with id 0.
  assertEquals(0, writer.getSchema().getMaximumId());
  boolean[] expected = new boolean[]{false};
  boolean[] included = OrcUtils.includeColumns("", writer.getSchema());
  assertEquals(true, Arrays.equals(expected, included));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test public void testMemoryManagementV11() throws Exception {
  // Mirror of testMemoryManagementV12 using the V_0_11 file format, which
  // ends up with 25 small stripes for the same 2500 rows.
  TypeDescription schema = createInnerSchema();
  MyMemoryManager memory = new MyMemoryManager(conf, 10000, 0.1);
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf)
          .setSchema(schema)
          .compress(CompressionKind.NONE)
          .stripeSize(50000)
          .bufferSize(100)
          .rowIndexStride(0)
          .memory(memory)
          .version(OrcFile.Version.V_0_11));
  // Creating the writer must register it with the memory manager.
  assertEquals(testFilePath, memory.path);
  VectorizedRowBatch batch = schema.createRowBatch();
  batch.size = 1;
  for (int rowIdx = 0; rowIdx < 2500; ++rowIdx) {
    ((LongColumnVector) batch.cols[0]).vector[0] = rowIdx * 300;
    ((BytesColumnVector) batch.cols[1]).setVal(0,
        Integer.toHexString(10 * rowIdx).getBytes());
    writer.addRowBatch(batch);
  }
  writer.close();
  // Closing the writer must unregister it.
  assertEquals(null, memory.path);
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  int stripeCount = 0;
  for (StripeInformation stripe : reader.getStripes()) {
    stripeCount += 1;
    assertTrue("stripe " + stripeCount + " is too long at " + stripe.getDataLength(),
        stripe.getDataLength() < 5000);
  }
  assertEquals(25, stripeCount);
  assertEquals(2500, reader.getNumberOfRows());
}
APIUtilityVerifier IterativeVerifier BranchVerifier InternalCallVerifier EqualityVerifier
/**
* We test union, timestamp, and decimal separately since we need to make the
* object inspector manually. (The Hive reflection-based object inspector
* doesn't handle them properly.)
*/
@Test public void testUnionAndTimestamp() throws Exception {
  TypeDescription schema = TypeDescription.createStruct()
      .addField("time", TypeDescription.createTimestamp())
      .addField("union", TypeDescription.createUnion()
          .addUnionChild(TypeDescription.createInt())
          .addUnionChild(TypeDescription.createString()))
      .addField("decimal", TypeDescription.createDecimal()
          .withPrecision(38).withScale(18));
  HiveDecimal maxValue = HiveDecimal.create("10000000000000000000");
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).setSchema(schema).stripeSize(1000)
          .compress(CompressionKind.NONE).bufferSize(100).blockPadding(false));
  VectorizedRowBatch batch = schema.createRowBatch();
  // Six hand-picked rows covering both union tags and null combinations.
  batch.size = 6;
  setUnion(batch, 0, Timestamp.valueOf("2000-03-12 15:00:00"), 0, 42, null,
      new HiveDecimalWritable("12345678.6547456"));
  setUnion(batch, 1, Timestamp.valueOf("2000-03-20 12:00:00.123456789"), 1,
      null, "hello", new HiveDecimalWritable("-5643.234"));
  setUnion(batch, 2, null, null, null, null, null);
  setUnion(batch, 3, null, 0, null, null, null);
  setUnion(batch, 4, null, 1, null, null, null);
  setUnion(batch, 5, Timestamp.valueOf("1970-01-01 00:00:00"), 0, 200000,
      null, new HiveDecimalWritable("10000000000000000000"));
  writer.addRowBatch(batch);
  batch.reset();
  // 68 rows with deterministic pseudo-random decimals and alternating tags.
  Random rand = new Random(42);
  for (int i = 1970; i < 2038; ++i) {
    Timestamp ts = Timestamp.valueOf(i + "-05-05 12:34:56." + i);
    HiveDecimal dec = HiveDecimal.create(new BigInteger(64, rand), rand.nextInt(18));
    if ((i & 1) == 0) {
      setUnion(batch, batch.size++, ts, 0, i * i, null, new HiveDecimalWritable(dec));
    } else {
      setUnion(batch, batch.size++, ts, 1, null, Integer.toString(i * i),
          new HiveDecimalWritable(dec));
    }
    if (maxValue.compareTo(dec) < 0) {
      maxValue = dec;
    }
  }
  writer.addRowBatch(batch);
  batch.reset();
  // 5000 repeating rows with the same int union value.
  batch.size = 1000;
  for (int c = 0; c < batch.cols.length; ++c) {
    batch.cols[c].setRepeating(true);
  }
  setUnion(batch, 0, null, 0, 1732050807, null, null);
  for (int i = 0; i < 5; ++i) {
    writer.addRowBatch(batch);
  }
  batch.reset();
  batch.size = 3;
  setUnion(batch, 0, null, 0, 0, null, null);
  setUnion(batch, 1, null, 0, 10, null, null);
  setUnion(batch, 2, null, 0, 138, null, null);
  writer.addRowBatch(batch);
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  schema = writer.getSchema();
  assertEquals(5, schema.getMaximumId());
  boolean[] expected = new boolean[]{false, false, false, false, false, false};
  boolean[] included = OrcUtils.includeColumns("", schema);
  assertEquals(true, Arrays.equals(expected, included));
  expected = new boolean[]{false, true, false, false, false, true};
  included = OrcUtils.includeColumns("time,decimal", schema);
  assertEquals(true, Arrays.equals(expected, included));
  // Including the union column pulls in both of its children.
  expected = new boolean[]{false, false, true, true, true, false};
  included = OrcUtils.includeColumns("union", schema);
  assertEquals(true, Arrays.equals(expected, included));
  assertEquals(false, reader.getMetadataKeys().iterator().hasNext());
  assertEquals(5077, reader.getNumberOfRows());
  DecimalColumnStatistics stats = (DecimalColumnStatistics) reader.getStatistics()[5];
  assertEquals(71, stats.getNumberOfValues());
  assertEquals(HiveDecimal.create("-5643.234"), stats.getMinimum());
  assertEquals(maxValue, stats.getMaximum());
  // Stripes must be contiguous and together cover all rows.
  int stripeCount = 0;
  int rowCount = 0;
  long currentOffset = -1;
  for (StripeInformation stripe : reader.getStripes()) {
    stripeCount += 1;
    rowCount += stripe.getNumberOfRows();
    if (currentOffset < 0) {
      currentOffset = stripe.getOffset() + stripe.getLength();
    } else {
      assertEquals(currentOffset, stripe.getOffset());
      currentOffset += stripe.getLength();
    }
  }
  assertEquals(reader.getNumberOfRows(), rowCount);
  assertEquals(2, stripeCount);
  assertEquals(reader.getContentLength(), currentOffset);
  RecordReader rows = reader.rows();
  assertEquals(0, rows.getRowNumber());
  assertEquals(0.0, rows.getProgress(), 0.000001);
  assertEquals(true, rows.hasNext());
  OrcStruct row = (OrcStruct) rows.next(null);
  assertEquals(1, rows.getRowNumber());
  ObjectInspector inspector = reader.getObjectInspector();
  // Fixed: this literal had its angle-bracket sections stripped
  // (it previously read just "struct,decimal:decimal(38,18)>").
  assertEquals("struct<time:timestamp,union:uniontype<int,string>,decimal:decimal(38,18)>",
      inspector.getTypeName());
  assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-12 15:00:00")),
      row.getFieldValue(0));
  // The reader reuses this OrcUnion instance for subsequent rows, which is
  // why later assertions read it without re-fetching the field.
  OrcUnion union = (OrcUnion) row.getFieldValue(1);
  assertEquals(0, union.getTag());
  assertEquals(new IntWritable(42), union.getObject());
  assertEquals(new HiveDecimalWritable(HiveDecimal.create("12345678.6547456")),
      row.getFieldValue(2));
  row = (OrcStruct) rows.next(row);
  assertEquals(2, rows.getRowNumber());
  assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),
      row.getFieldValue(0));
  assertEquals(1, union.getTag());
  assertEquals(new Text("hello"), union.getObject());
  assertEquals(new HiveDecimalWritable(HiveDecimal.create("-5643.234")),
      row.getFieldValue(2));
  row = (OrcStruct) rows.next(row);
  assertEquals(null, row.getFieldValue(0));
  assertEquals(null, row.getFieldValue(1));
  assertEquals(null, row.getFieldValue(2));
  row = (OrcStruct) rows.next(row);
  assertEquals(null, row.getFieldValue(0));
  union = (OrcUnion) row.getFieldValue(1);
  assertEquals(0, union.getTag());
  assertEquals(null, union.getObject());
  assertEquals(null, row.getFieldValue(2));
  row = (OrcStruct) rows.next(row);
  assertEquals(null, row.getFieldValue(0));
  assertEquals(1, union.getTag());
  assertEquals(null, union.getObject());
  assertEquals(null, row.getFieldValue(2));
  row = (OrcStruct) rows.next(row);
  assertEquals(new TimestampWritable(Timestamp.valueOf("1970-01-01 00:00:00")),
      row.getFieldValue(0));
  assertEquals(new IntWritable(200000), union.getObject());
  assertEquals(new HiveDecimalWritable(HiveDecimal.create("10000000000000000000")),
      row.getFieldValue(2));
  // Replay the same seeded random sequence to recompute expected decimals.
  rand = new Random(42);
  for (int i = 1970; i < 2038; ++i) {
    row = (OrcStruct) rows.next(row);
    assertEquals(new TimestampWritable(Timestamp.valueOf(i + "-05-05 12:34:56." + i)),
        row.getFieldValue(0));
    if ((i & 1) == 0) {
      assertEquals(0, union.getTag());
      assertEquals(new IntWritable(i * i), union.getObject());
    } else {
      assertEquals(1, union.getTag());
      assertEquals(new Text(Integer.toString(i * i)), union.getObject());
    }
    assertEquals(new HiveDecimalWritable(HiveDecimal.create(new BigInteger(64, rand),
        rand.nextInt(18))), row.getFieldValue(2));
  }
  for (int i = 0; i < 5000; ++i) {
    row = (OrcStruct) rows.next(row);
    assertEquals(new IntWritable(1732050807), union.getObject());
  }
  row = (OrcStruct) rows.next(row);
  assertEquals(new IntWritable(0), union.getObject());
  row = (OrcStruct) rows.next(row);
  assertEquals(new IntWritable(10), union.getObject());
  row = (OrcStruct) rows.next(row);
  assertEquals(new IntWritable(138), union.getObject());
  assertEquals(false, rows.hasNext());
  assertEquals(1.0, rows.getProgress(), 0.00001);
  assertEquals(reader.getNumberOfRows(), rows.getRowNumber());
  // Seeking back to row 1 must reproduce the second row exactly.
  rows.seekToRow(1);
  row = (OrcStruct) rows.next(row);
  assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),
      row.getFieldValue(0));
  assertEquals(1, union.getTag());
  assertEquals(new Text("hello"), union.getObject());
  assertEquals(new HiveDecimalWritable(HiveDecimal.create("-5643.234")),
      row.getFieldValue(2));
  rows.close();
}
APIUtilityVerifier IterativeVerifier BranchVerifier InternalCallVerifier EqualityVerifier
/**
* Test maps and how they interact with the child column. In particular,
* put nulls between back-to-back maps and then make some maps whose
* child ranges overlap.
* @throws Exception
*/
@Test public void testMaps() throws Exception {
// Single map<bigint,bigint> column over 1024 rows. Runs of null rows are
// interleaved with runs of small maps, and rows 600-699 use child ranges
// (offsets[r]=r, length 2) that overlap the next row's range.
TypeDescription schema=TypeDescription.createStruct().addField("map",TypeDescription.createMap(TypeDescription.createLong(),TypeDescription.createLong()));
Writer writer=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).setSchema(schema));
VectorizedRowBatch batch=schema.createRowBatch();
batch.size=1024;
MapColumnVector map=(MapColumnVector)batch.cols[0];
map.noNulls=false;
for (int r=0; r < 1024; ++r) {
if (r < 200) {
// rows 0-199: null map
map.isNull[r]=true;
}
else if (r < 300) {
// rows 200-299: one-entry map drawn from child slot r-200
map.offsets[r]=r - 200;
map.lengths[r]=1;
}
else if (r < 400) {
map.isNull[r]=true;
}
else if (r < 500) {
// rows 400-499: one-entry map drawn from child slot r-300
map.offsets[r]=r - 300;
map.lengths[r]=1;
}
else if (r < 600) {
map.isNull[r]=true;
}
else if (r < 700) {
// rows 600-699: two-entry map whose child range overlaps the next row's
map.offsets[r]=r;
map.lengths[r]=2;
}
else {
map.isNull[r]=true;
}
// every child slot r holds the pair (r, r*10)
((LongColumnVector)map.keys).vector[r]=r;
((LongColumnVector)map.values).vector[r]=r * 10;
}
writer.addRowBatch(batch);
writer.close();
// Read back and check each row's rendered map (or null) per range.
Reader reader=OrcFile.createReader(testFilePath,OrcFile.readerOptions(conf));
RecordReader rows=reader.rows();
OrcStruct row=null;
for (int r=0; r < 1024; ++r) {
assertEquals(true,rows.hasNext());
row=(OrcStruct)rows.next(row);
Map inner=(Map)row.getFieldValue(0);
if (r < 200) {
assertEquals("row " + r,null,inner);
}
else if (r < 300) {
assertEquals("row " + r,"{" + (r - 200) + "="+ ((r - 200) * 10)+ "}",inner.toString());
}
else if (r < 400) {
assertEquals("row " + r,null,inner);
}
else if (r < 500) {
assertEquals("row " + r,"{" + (r - 300) + "="+ ((r - 300) * 10)+ "}",inner.toString());
}
else if (r < 600) {
assertEquals("row " + r,null,inner);
}
else if (r < 700) {
// overlapping child range: entries (r, r*10) and (r+1, (r+1)*10)
assertEquals("row " + r,"{" + r + "="+ (r * 10)+ ", "+ (r + 1)+ "="+ (10 * (r + 1))+ "}",inner.toString());
}
else {
assertEquals("row " + r,null,inner);
}
}
assertEquals(false,rows.hasNext());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes an ORC file with zero row batches and verifies the reader reports
 * it as empty: no rows, no metadata keys, no stripes, NONE compression.
 */
@Test public void testEmptyFile() throws Exception {
  TypeDescription schema = createBigRowSchema();
  Writer emptyWriter = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf)
          .setSchema(schema)
          .stripeSize(1000)
          .compress(CompressionKind.NONE)
          .bufferSize(100));
  // Close immediately: nothing was ever added.
  emptyWriter.close();
  Reader emptyReader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  assertEquals(false, emptyReader.rows().hasNext());
  assertEquals(CompressionKind.NONE, emptyReader.getCompressionKind());
  assertEquals(0, emptyReader.getNumberOfRows());
  assertEquals(0, emptyReader.getCompressionSize());
  assertEquals(false, emptyReader.getMetadataKeys().iterator().hasNext());
  // An empty ORC file still carries the 3-byte header.
  assertEquals(3, emptyReader.getContentLength());
  assertEquals(false, emptyReader.getStripes().iterator().hasNext());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Exercises user metadata on an ORC writer: the last value written for a
 * repeated key wins (even when written after row data), a large 40KB value
 * round-trips intact, and looking up an unknown key throws
 * IllegalArgumentException.
 */
@Test public void metaData() throws Exception {
  TypeDescription schema = createBigRowSchema();
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf)
          .setSchema(schema)
          .stripeSize(1000)
          .compress(CompressionKind.NONE)
          .bufferSize(100));
  writer.addUserMetadata("my.meta", byteBuf(1,2,3,4,5,6,7,-1,-2,127,-128));
  writer.addUserMetadata("clobber", byteBuf(1,2,3));
  writer.addUserMetadata("clobber", byteBuf(4,3,2,1));
  ByteBuffer bigBuf = ByteBuffer.allocate(40000);
  Random random = new Random(0);
  random.nextBytes(bigBuf.array());
  writer.addUserMetadata("big", bigBuf);
  bigBuf.position(0);
  VectorizedRowBatch batch = schema.createRowBatch();
  batch.size = 1;
  setBigRow(batch, 0, true, (byte)127, (short)1024, 42,
      42L * 1024 * 1024 * 1024, (float)3.1415, -2.713,
      null, null, null, null, null);
  writer.addRowBatch(batch);
  // Written after the row batch -- this is the value that must survive.
  writer.addUserMetadata("clobber", byteBuf(5,7,11,13,17,19));
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  assertEquals(byteBuf(5,7,11,13,17,19), reader.getMetadataValue("clobber"));
  assertEquals(byteBuf(1,2,3,4,5,6,7,-1,-2,127,-128), reader.getMetadataValue("my.meta"));
  assertEquals(bigBuf, reader.getMetadataValue("big"));
  // An unknown key is an error, not a null/empty result.
  boolean sawIllegalArgument = false;
  try {
    reader.getMetadataValue("unknown");
  } catch (IllegalArgumentException expected) {
    sawIllegalArgument = true;
  }
  assertTrue(sawIllegalArgument);
  // Exactly the three keys written above are listed -- anything else is a bug.
  int recognizedKeys = 0;
  for (String key : reader.getMetadataKeys()) {
    if ("my.meta".equals(key) || "clobber".equals(key) || "big".equals(key)) {
      recognizedKeys += 1;
    } else {
      throw new IllegalArgumentException("unknown key " + key);
    }
  }
  assertEquals(3, recognizedKeys);
  assertEquals(1, reader.getStripeStatistics().size());
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
* Read and write a randomly generated snappy file.
* @throws Exception
*/
@Test public void testSnappy() throws Exception {
  TypeDescription schema = createInnerSchema();
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf)
          .setSchema(schema)
          .stripeSize(1000)
          .compress(CompressionKind.SNAPPY)
          .bufferSize(100));
  VectorizedRowBatch batch = schema.createRowBatch();
  Random random = new Random(12);
  batch.size = 1000;
  // Write 10 batches x 1000 rows of seeded pseudo-random (int, hex-string) pairs.
  for (int batchIndex = 0; batchIndex < 10; ++batchIndex) {
    for (int rowInBatch = 0; rowInBatch < 1000; ++rowInBatch) {
      ((LongColumnVector) batch.cols[0]).vector[rowInBatch] = random.nextInt();
      ((BytesColumnVector) batch.cols[1]).setVal(rowInBatch,
          Integer.toHexString(random.nextInt()).getBytes());
    }
    writer.addRowBatch(batch);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  // Re-seed so the reader side regenerates the identical random sequence.
  random = new Random(12);
  OrcStruct row = null;
  for (int rowIndex = 0; rowIndex < 10000; ++rowIndex) {
    assertEquals(true, rows.hasNext());
    row = (OrcStruct) rows.next(row);
    assertEquals(random.nextInt(), ((IntWritable) row.getFieldValue(0)).get());
    assertEquals(Integer.toHexString(random.nextInt()), row.getFieldValue(1).toString());
  }
  assertEquals(false, rows.hasNext());
  rows.close();
}
APIUtilityVerifier BranchVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// Writes two fully-populated "big rows", then checks column pruning masks,
// file/stripe statistics, and a full field-by-field read-back of both rows
// through object inspectors.
// NOTE(review): several tokens in this method appear to have lost their
// generic angle brackets in extraction (e.g. "List extends StructField>",
// "Map,?>", and the expected-type string at getTypeName()); presumably
// originally "List<? extends StructField>" etc. -- confirm against the
// upstream TestOrcFile source before compiling.
@Test public void test1() throws Exception {
TypeDescription schema=createBigRowSchema();
Writer writer=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000).bufferSize(10000));
VectorizedRowBatch batch=schema.createRowBatch();
batch.size=2;
// Row 0: empty bytes/list/map variants exercised via bytes(0..4), list of 2, empty map.
setBigRow(batch,0,false,(byte)1,(short)1024,65536,Long.MAX_VALUE,(float)1.0,-15.0,bytes(0,1,2,3,4),"hi",new MiddleStruct(inner(1,"bye"),inner(2,"sigh")),list(inner(3,"good"),inner(4,"bad")),map());
// Row 1: empty bytes(), 3-element list, 2-entry map.
setBigRow(batch,1,true,(byte)100,(short)2048,65536,Long.MAX_VALUE,(float)2.0,-5.0,bytes(),"bye",new MiddleStruct(inner(1,"bye"),inner(2,"sigh")),list(inner(100000000,"cat"),inner(-100000,"in"),inner(1234,"hat")),map(inner(5,"chani"),inner(1,"mauddib")));
writer.addRowBatch(batch);
writer.close();
Reader reader=OrcFile.createReader(testFilePath,OrcFile.readerOptions(conf).filesystem(fs));
schema=writer.getSchema();
assertEquals(23,schema.getMaximumId());
// Column-pruning masks: empty selection, partial selection (checked twice), full selection.
boolean[] expected=new boolean[]{false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false};
boolean[] included=OrcUtils.includeColumns("",schema);
assertEquals(true,Arrays.equals(expected,included));
expected=new boolean[]{false,true,false,false,false,false,false,false,false,true,true,true,true,true,true,false,false,false,false,true,true,true,true,true};
included=OrcUtils.includeColumns("boolean1,string1,middle,map",schema);
assertArrayEquals(expected,included);
expected=new boolean[]{false,true,false,false,false,false,false,false,false,true,true,true,true,true,true,false,false,false,false,true,true,true,true,true};
included=OrcUtils.includeColumns("boolean1,string1,middle,map",schema);
assertArrayEquals(expected,included);
expected=new boolean[]{false,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true};
included=OrcUtils.includeColumns("boolean1,byte1,short1,int1,long1,float1,double1,bytes1,string1,middle,list,map",schema);
assertEquals(true,Arrays.equals(expected,included));
// File-level column statistics for boolean1 (col 1), short1 (col 3),
// double1 (col 7), and string1 (col 9).
ColumnStatistics[] stats=reader.getStatistics();
assertEquals(2,stats[1].getNumberOfValues());
assertEquals(1,((BooleanColumnStatistics)stats[1]).getFalseCount());
assertEquals(1,((BooleanColumnStatistics)stats[1]).getTrueCount());
assertEquals("count: 2 hasNull: false true: 1",stats[1].toString());
assertEquals(2048,((IntegerColumnStatistics)stats[3]).getMaximum());
assertEquals(1024,((IntegerColumnStatistics)stats[3]).getMinimum());
assertEquals(true,((IntegerColumnStatistics)stats[3]).isSumDefined());
assertEquals(3072,((IntegerColumnStatistics)stats[3]).getSum());
assertEquals("count: 2 hasNull: false min: 1024 max: 2048 sum: 3072",stats[3].toString());
// Both rows landed in a single stripe, so stripe stats mirror file stats.
StripeStatistics ss=reader.getStripeStatistics().get(0);
assertEquals(2,ss.getColumnStatistics()[0].getNumberOfValues());
assertEquals(1,((BooleanColumnStatistics)ss.getColumnStatistics()[1]).getTrueCount());
assertEquals(1024,((IntegerColumnStatistics)ss.getColumnStatistics()[3]).getMinimum());
assertEquals(2048,((IntegerColumnStatistics)ss.getColumnStatistics()[3]).getMaximum());
assertEquals(3072,((IntegerColumnStatistics)ss.getColumnStatistics()[3]).getSum());
assertEquals(-15.0,((DoubleColumnStatistics)stats[7]).getMinimum());
assertEquals(-5.0,((DoubleColumnStatistics)stats[7]).getMaximum());
assertEquals(-20.0,((DoubleColumnStatistics)stats[7]).getSum(),0.00001);
assertEquals("count: 2 hasNull: false min: -15.0 max: -5.0 sum: -20.0",stats[7].toString());
assertEquals("count: 2 hasNull: false min: bye max: hi sum: 5",stats[9].toString());
// Walk the object-inspector tree for every field of the big-row struct.
// NOTE(review): the expected type-name string below looks extraction-garbled
// (angle-bracketed type parameters missing) -- verify against upstream.
StructObjectInspector readerInspector=(StructObjectInspector)reader.getObjectInspector();
assertEquals(ObjectInspector.Category.STRUCT,readerInspector.getCategory());
assertEquals("struct>>,list:array>,"+ "map:map>>",readerInspector.getTypeName());
List extends StructField> fields=readerInspector.getAllStructFieldRefs();
BooleanObjectInspector bo=(BooleanObjectInspector)readerInspector.getStructFieldRef("boolean1").getFieldObjectInspector();
ByteObjectInspector by=(ByteObjectInspector)readerInspector.getStructFieldRef("byte1").getFieldObjectInspector();
ShortObjectInspector sh=(ShortObjectInspector)readerInspector.getStructFieldRef("short1").getFieldObjectInspector();
IntObjectInspector in=(IntObjectInspector)readerInspector.getStructFieldRef("int1").getFieldObjectInspector();
LongObjectInspector lo=(LongObjectInspector)readerInspector.getStructFieldRef("long1").getFieldObjectInspector();
FloatObjectInspector fl=(FloatObjectInspector)readerInspector.getStructFieldRef("float1").getFieldObjectInspector();
DoubleObjectInspector dbl=(DoubleObjectInspector)readerInspector.getStructFieldRef("double1").getFieldObjectInspector();
BinaryObjectInspector bi=(BinaryObjectInspector)readerInspector.getStructFieldRef("bytes1").getFieldObjectInspector();
StringObjectInspector st=(StringObjectInspector)readerInspector.getStructFieldRef("string1").getFieldObjectInspector();
StructObjectInspector mid=(StructObjectInspector)readerInspector.getStructFieldRef("middle").getFieldObjectInspector();
List extends StructField> midFields=mid.getAllStructFieldRefs();
ListObjectInspector midli=(ListObjectInspector)midFields.get(0).getFieldObjectInspector();
StructObjectInspector inner=(StructObjectInspector)midli.getListElementObjectInspector();
List extends StructField> inFields=inner.getAllStructFieldRefs();
ListObjectInspector li=(ListObjectInspector)readerInspector.getStructFieldRef("list").getFieldObjectInspector();
MapObjectInspector ma=(MapObjectInspector)readerInspector.getStructFieldRef("map").getFieldObjectInspector();
StringObjectInspector mk=(StringObjectInspector)ma.getMapKeyObjectInspector();
// First row: every primitive, the nested middle struct, the 2-element list,
// and the empty map must match what setBigRow wrote.
RecordReader rows=reader.rows();
Object row=rows.next(null);
assertNotNull(row);
assertEquals(false,bo.get(readerInspector.getStructFieldData(row,fields.get(0))));
assertEquals(1,by.get(readerInspector.getStructFieldData(row,fields.get(1))));
assertEquals(1024,sh.get(readerInspector.getStructFieldData(row,fields.get(2))));
assertEquals(65536,in.get(readerInspector.getStructFieldData(row,fields.get(3))));
assertEquals(Long.MAX_VALUE,lo.get(readerInspector.getStructFieldData(row,fields.get(4))));
assertEquals(1.0,fl.get(readerInspector.getStructFieldData(row,fields.get(5))),0.00001);
assertEquals(-15.0,dbl.get(readerInspector.getStructFieldData(row,fields.get(6))),0.00001);
assertEquals(bytes(0,1,2,3,4),bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,fields.get(7))));
assertEquals("hi",st.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(8))));
List> midRow=midli.getList(mid.getStructFieldData(readerInspector.getStructFieldData(row,fields.get(9)),midFields.get(0)));
assertNotNull(midRow);
assertEquals(2,midRow.size());
assertEquals(1,in.get(inner.getStructFieldData(midRow.get(0),inFields.get(0))));
assertEquals("bye",st.getPrimitiveJavaObject(inner.getStructFieldData(midRow.get(0),inFields.get(1))));
assertEquals(2,in.get(inner.getStructFieldData(midRow.get(1),inFields.get(0))));
assertEquals("sigh",st.getPrimitiveJavaObject(inner.getStructFieldData(midRow.get(1),inFields.get(1))));
List> list=li.getList(readerInspector.getStructFieldData(row,fields.get(10)));
assertEquals(2,list.size());
assertEquals(3,in.get(inner.getStructFieldData(list.get(0),inFields.get(0))));
assertEquals("good",st.getPrimitiveJavaObject(inner.getStructFieldData(list.get(0),inFields.get(1))));
assertEquals(4,in.get(inner.getStructFieldData(list.get(1),inFields.get(0))));
assertEquals("bad",st.getPrimitiveJavaObject(inner.getStructFieldData(list.get(1),inFields.get(1))));
Map,?> map=ma.getMap(readerInspector.getStructFieldData(row,fields.get(11)));
assertEquals(0,map.size());
// Second row: different primitives, empty bytes, 3-element list, 2-entry map.
assertEquals(true,rows.hasNext());
row=rows.next(row);
assertEquals(true,bo.get(readerInspector.getStructFieldData(row,fields.get(0))));
assertEquals(100,by.get(readerInspector.getStructFieldData(row,fields.get(1))));
assertEquals(2048,sh.get(readerInspector.getStructFieldData(row,fields.get(2))));
assertEquals(65536,in.get(readerInspector.getStructFieldData(row,fields.get(3))));
assertEquals(Long.MAX_VALUE,lo.get(readerInspector.getStructFieldData(row,fields.get(4))));
assertEquals(2.0,fl.get(readerInspector.getStructFieldData(row,fields.get(5))),0.00001);
assertEquals(-5.0,dbl.get(readerInspector.getStructFieldData(row,fields.get(6))),0.00001);
assertEquals(bytes(),bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,fields.get(7))));
assertEquals("bye",st.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,fields.get(8))));
midRow=midli.getList(mid.getStructFieldData(readerInspector.getStructFieldData(row,fields.get(9)),midFields.get(0)));
assertNotNull(midRow);
assertEquals(2,midRow.size());
assertEquals(1,in.get(inner.getStructFieldData(midRow.get(0),inFields.get(0))));
assertEquals("bye",st.getPrimitiveJavaObject(inner.getStructFieldData(midRow.get(0),inFields.get(1))));
assertEquals(2,in.get(inner.getStructFieldData(midRow.get(1),inFields.get(0))));
assertEquals("sigh",st.getPrimitiveJavaObject(inner.getStructFieldData(midRow.get(1),inFields.get(1))));
list=li.getList(readerInspector.getStructFieldData(row,fields.get(10)));
assertEquals(3,list.size());
assertEquals(100000000,in.get(inner.getStructFieldData(list.get(0),inFields.get(0))));
assertEquals("cat",st.getPrimitiveJavaObject(inner.getStructFieldData(list.get(0),inFields.get(1))));
assertEquals(-100000,in.get(inner.getStructFieldData(list.get(1),inFields.get(0))));
assertEquals("in",st.getPrimitiveJavaObject(inner.getStructFieldData(list.get(1),inFields.get(1))));
assertEquals(1234,in.get(inner.getStructFieldData(list.get(2),inFields.get(0))));
assertEquals("hat",st.getPrimitiveJavaObject(inner.getStructFieldData(list.get(2),inFields.get(1))));
map=ma.getMap(readerInspector.getStructFieldData(row,fields.get(11)));
assertEquals(2,map.size());
// Map iteration order is unspecified, so track which keys were seen and
// require each exactly once.
boolean[] found=new boolean[2];
for ( Object key : map.keySet()) {
String str=mk.getPrimitiveJavaObject(key);
if (str.equals("chani")) {
assertEquals(false,found[0]);
assertEquals(5,in.get(inner.getStructFieldData(map.get(key),inFields.get(0))));
assertEquals(str,st.getPrimitiveJavaObject(inner.getStructFieldData(map.get(key),inFields.get(1))));
found[0]=true;
}
else if (str.equals("mauddib")) {
assertEquals(false,found[1]);
assertEquals(1,in.get(inner.getStructFieldData(map.get(key),inFields.get(0))));
assertEquals(str,st.getPrimitiveJavaObject(inner.getStructFieldData(map.get(key),inFields.get(1))));
found[1]=true;
}
else {
throw new IllegalArgumentException("Unknown key " + str);
}
}
assertEquals(true,found[0]);
assertEquals(true,found[1]);
assertEquals(false,rows.hasNext());
rows.close();
}
APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Writes 3500 rows (int1 = i*300, string1 = hex(10*i)) with a 1000-row index
// stride, then checks that search arguments skip whole row groups:
//  1) 300000 <= int1 < 600000  -> only row group [1000,2000) is read
//  2) int1 < 0                 -> nothing is read
//  3) int1 < 30000 OR int1 >= 1020000 -> first and last row groups only
@Test public void testPredicatePushdown() throws Exception {
TypeDescription schema=createInnerSchema();
Writer writer=OrcFile.createWriter(testFilePath,OrcFile.writerOptions(conf).setSchema(schema).stripeSize(400000L).compress(CompressionKind.NONE).bufferSize(500).rowIndexStride(1000));
VectorizedRowBatch batch=schema.createRowBatch();
batch.ensureSize(3500);
batch.size=3500;
for (int i=0; i < 3500; ++i) {
((LongColumnVector)batch.cols[0]).vector[i]=i * 300;
((BytesColumnVector)batch.cols[1]).setVal(i,Integer.toHexString(10 * i).getBytes());
}
writer.addRowBatch(batch);
writer.close();
Reader reader=OrcFile.createReader(testFilePath,OrcFile.readerOptions(conf).filesystem(fs));
assertEquals(3500,reader.getNumberOfRows());
// Scenario 1: NOT(int1 < 300000) AND int1 < 600000, i.e. rows 1000..1999.
SearchArgument sarg=SearchArgumentFactory.newBuilder().startAnd().startNot().lessThan("int1",PredicateLeaf.Type.LONG,300000L).end().lessThan("int1",PredicateLeaf.Type.LONG,600000L).end().build();
RecordReader rows=reader.rowsOptions(new Reader.Options().range(0L,Long.MAX_VALUE).include(new boolean[]{true,true,true}).searchArgument(sarg,new String[]{null,"int1","string1"}));
// Reader must have skipped straight to the start of the matching row group.
assertEquals(1000L,rows.getRowNumber());
OrcStruct row=null;
for (int i=1000; i < 2000; ++i) {
assertTrue(rows.hasNext());
row=(OrcStruct)rows.next(row);
assertEquals(300 * i,((IntWritable)row.getFieldValue(0)).get());
assertEquals(Integer.toHexString(10 * i),row.getFieldValue(1).toString());
}
assertTrue(!rows.hasNext());
assertEquals(3500,rows.getRowNumber());
// Scenario 2: int1 < 0 matches nothing; the reader is positioned at EOF.
sarg=SearchArgumentFactory.newBuilder().startAnd().lessThan("int1",PredicateLeaf.Type.LONG,0L).end().build();
rows=reader.rowsOptions(new Reader.Options().range(0L,Long.MAX_VALUE).include(new boolean[]{true,true,true}).searchArgument(sarg,new String[]{null,"int1","string1"}));
assertEquals(3500L,rows.getRowNumber());
assertTrue(!rows.hasNext());
// Scenario 3: int1 < 300*100 OR NOT(int1 < 300*3400) -- first and last groups.
sarg=SearchArgumentFactory.newBuilder().startOr().lessThan("int1",PredicateLeaf.Type.LONG,300L * 100).startNot().lessThan("int1",PredicateLeaf.Type.LONG,300L * 3400).end().end().build();
rows=reader.rowsOptions(new Reader.Options().range(0L,Long.MAX_VALUE).include(new boolean[]{true,true,true}).searchArgument(sarg,new String[]{null,"int1","string1"}));
row=null;
for (int i=0; i < 1000; ++i) {
assertTrue(rows.hasNext());
assertEquals(i,rows.getRowNumber());
row=(OrcStruct)rows.next(row);
assertEquals(300 * i,((IntWritable)row.getFieldValue(0)).get());
assertEquals(Integer.toHexString(10 * i),row.getFieldValue(1).toString());
}
// Row groups [1000,3000) are skipped; reading resumes at row 3000.
for (int i=3000; i < 3500; ++i) {
assertTrue(rows.hasNext());
assertEquals(i,rows.getRowNumber());
row=(OrcStruct)rows.next(row);
assertEquals(300 * i,((IntWritable)row.getFieldValue(0)).get());
assertEquals(Integer.toHexString(10 * i),row.getFieldValue(1).toString());
}
assertTrue(!rows.hasNext());
assertEquals(3500,rows.getRowNumber());
}
Class: org.apache.hadoop.hive.ql.io.parquet.TestArrayCompatibility APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Avro-style list: optional group (LIST) containing repeated group "array"
// with a single required int64 "count". Each repeated group must be read as
// a one-field record element, not unwrapped to a bare long. The event
// sequence below is the exact Parquet file content -- statement order matters.
@Test public void testAvroSingleFieldGroupInList() throws Exception {
Path test=writeDirect("AvroSingleFieldGroupInList",Types.buildMessage().optionalGroup().as(LIST).repeatedGroup().required(INT64).named("count").named("array").named("single_element_groups").named("AvroSingleFieldGroupInList"),new DirectWriter(){
@Override public void write( RecordConsumer rc){
rc.startMessage();
rc.startField("single_element_groups",0);
rc.startGroup();
rc.startField("array",0);
rc.startGroup();
rc.startField("count",0);
rc.addLong(1234L);
rc.endField("count",0);
rc.endGroup();
rc.startGroup();
rc.startField("count",0);
rc.addLong(2345L);
rc.endField("count",0);
rc.endGroup();
rc.endField("array",0);
rc.endGroup();
rc.endField("single_element_groups",0);
rc.endMessage();
}
}
);
ArrayWritable expected=list(record(new LongWritable(1234L)),record(new LongWritable(2345L)));
List records=read(test);
Assert.assertEquals("Should have only one record",1,records.size());
assertEquals("Should match expected record",expected,records.get(0));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// LIST whose repeated group "element" has two fields (latitude, longitude):
// multi-field repeated groups are always the element record itself, never a
// wrapper. The write() body is the exact Parquet event sequence under test.
@Test public void testMultiFieldGroupInList() throws Exception {
Path test=writeDirect("MultiFieldGroupInList",Types.buildMessage().optionalGroup().as(LIST).repeatedGroup().required(DOUBLE).named("latitude").required(DOUBLE).named("longitude").named("element").named("locations").named("MultiFieldGroupInList"),new DirectWriter(){
@Override public void write( RecordConsumer rc){
rc.startMessage();
rc.startField("locations",0);
rc.startGroup();
rc.startField("element",0);
rc.startGroup();
rc.startField("latitude",0);
rc.addDouble(0.0);
rc.endField("latitude",0);
rc.startField("longitude",1);
rc.addDouble(0.0);
rc.endField("longitude",1);
rc.endGroup();
rc.startGroup();
rc.startField("latitude",0);
rc.addDouble(0.0);
rc.endField("latitude",0);
rc.startField("longitude",1);
rc.addDouble(180.0);
rc.endField("longitude",1);
rc.endGroup();
rc.endField("element",0);
rc.endGroup();
rc.endField("locations",0);
rc.endMessage();
}
}
);
ArrayWritable expected=list(record(new DoubleWritable(0.0),new DoubleWritable(0.0)),record(new DoubleWritable(0.0),new DoubleWritable(180.0)));
List records=read(test);
Assert.assertEquals("Should have only one record",1,records.size());
assertEquals("Should match expected record",expected,records.get(0));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Hive's legacy 3-level list layout: LIST group -> repeated "bag" -> required
// group "element" with two doubles. Reader must unwrap the "bag" level and
// yield the element records. The write() body is the exact event stream.
@Test public void testHiveRequiredGroupInList() throws Exception {
Path test=writeDirect("HiveRequiredGroupInList",Types.buildMessage().optionalGroup().as(LIST).repeatedGroup().requiredGroup().required(DOUBLE).named("latitude").required(DOUBLE).named("longitude").named("element").named("bag").named("locations").named("HiveRequiredGroupInList"),new DirectWriter(){
@Override public void write( RecordConsumer rc){
rc.startMessage();
rc.startField("locations",0);
rc.startGroup();
rc.startField("bag",0);
rc.startGroup();
rc.startField("element",0);
rc.startGroup();
rc.startField("latitude",0);
rc.addDouble(0.0);
rc.endField("latitude",0);
rc.startField("longitude",1);
rc.addDouble(180.0);
rc.endField("longitude",1);
rc.endGroup();
rc.endField("element",0);
rc.endGroup();
rc.startGroup();
rc.startField("element",0);
rc.startGroup();
rc.startField("latitude",0);
rc.addDouble(0.0);
rc.endField("latitude",0);
rc.startField("longitude",1);
rc.addDouble(0.0);
rc.endField("longitude",1);
rc.endGroup();
rc.endField("element",0);
rc.endGroup();
rc.endField("bag",0);
rc.endGroup();
rc.endField("locations",0);
rc.endMessage();
}
}
);
ArrayWritable expected=list(record(new DoubleWritable(0.0),new DoubleWritable(180.0)),record(new DoubleWritable(0.0),new DoubleWritable(0.0)));
List records=read(test);
Assert.assertEquals("Should have only one record",1,records.size());
assertEquals("Should match expected record",expected,records.get(0));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Thrift naming convention: the repeated group is named "<list>_tuple"
// ("single_element_groups_tuple"), which disambiguates the single-field case
// in favor of treating each repeated group as a record element.
@Test public void testThriftSingleFieldGroupInList() throws Exception {
Path test=writeDirect("ThriftSingleFieldGroupInList",Types.buildMessage().optionalGroup().as(LIST).repeatedGroup().required(INT64).named("count").named("single_element_groups_tuple").named("single_element_groups").named("ThriftSingleFieldGroupInList"),new DirectWriter(){
@Override public void write( RecordConsumer rc){
rc.startMessage();
rc.startField("single_element_groups",0);
rc.startGroup();
rc.startField("single_element_groups_tuple",0);
rc.startGroup();
rc.startField("count",0);
rc.addLong(1234L);
rc.endField("count",0);
rc.endGroup();
rc.startGroup();
rc.startField("count",0);
rc.addLong(2345L);
rc.endField("count",0);
rc.endGroup();
rc.endField("single_element_groups_tuple",0);
rc.endGroup();
rc.endField("single_element_groups",0);
rc.endMessage();
}
}
);
ArrayWritable expected=list(record(new LongWritable(1234L)),record(new LongWritable(2345L)));
List records=read(test);
Assert.assertEquals("Should have only one record",1,records.size());
assertEquals("Should match expected record",expected,records.get(0));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// A bare repeated primitive with no LIST annotation must still be read as a
// list of its values.
@Test public void testUnannotatedListOfPrimitives() throws Exception {
MessageType fileSchema=Types.buildMessage().repeated(INT32).named("list_of_ints").named("UnannotatedListOfPrimitives");
Path test=writeDirect("UnannotatedListOfPrimitives",fileSchema,new DirectWriter(){
@Override public void write( RecordConsumer rc){
rc.startMessage();
rc.startField("list_of_ints",0);
rc.addInteger(34);
rc.addInteger(35);
rc.addInteger(36);
rc.endField("list_of_ints",0);
rc.endMessage();
}
}
);
ArrayWritable expected=list(new IntWritable(34),new IntWritable(35),new IntWritable(36));
List records=read(test);
Assert.assertEquals("Should have only one record",1,records.size());
assertEquals("Should match expected record",expected,records.get(0));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Ambiguous case: the repeated group name "single_element_group" matches
// neither the Avro ("array") nor Thrift ("*_tuple") convention, so the
// single int64 field is unwrapped -- the expected list holds bare longs,
// not one-field records (contrast with testAvroSingleFieldGroupInList).
@Test public void testAmbiguousSingleFieldGroupInList() throws Exception {
Path test=writeDirect("SingleFieldGroupInList",Types.buildMessage().optionalGroup().as(LIST).repeatedGroup().required(INT64).named("count").named("single_element_group").named("single_element_groups").named("SingleFieldGroupInList"),new DirectWriter(){
@Override public void write( RecordConsumer rc){
rc.startMessage();
rc.startField("single_element_groups",0);
rc.startGroup();
rc.startField("single_element_group",0);
rc.startGroup();
rc.startField("count",0);
rc.addLong(1234L);
rc.endField("count",0);
rc.endGroup();
rc.startGroup();
rc.startField("count",0);
rc.addLong(2345L);
rc.endField("count",0);
rc.endGroup();
rc.endField("single_element_group",0);
rc.endGroup();
rc.endField("single_element_groups",0);
rc.endMessage();
}
}
);
ArrayWritable expected=list(new LongWritable(1234L),new LongWritable(2345L));
List records=read(test);
Assert.assertEquals("Should have only one record",1,records.size());
assertEquals("Should match expected record",expected,records.get(0));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Avro 2-level list of primitives: required LIST group wrapping a repeated
// int32 named "array". Values are read directly as list elements.
@Test public void testAvroPrimitiveInList() throws Exception {
Path test=writeDirect("AvroPrimitiveInList",Types.buildMessage().requiredGroup().as(LIST).repeated(INT32).named("array").named("list_of_ints").named("AvroPrimitiveInList"),new DirectWriter(){
@Override public void write( RecordConsumer rc){
rc.startMessage();
rc.startField("list_of_ints",0);
rc.startGroup();
rc.startField("array",0);
rc.addInteger(34);
rc.addInteger(35);
rc.addInteger(36);
rc.endField("array",0);
rc.endGroup();
rc.endField("list_of_ints",0);
rc.endMessage();
}
}
);
ArrayWritable expected=list(new IntWritable(34),new IntWritable(35),new IntWritable(36));
List records=read(test);
Assert.assertEquals("Should have only one record",1,records.size());
assertEquals("Should match expected record",expected,records.get(0));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Thrift 2-level list of primitives: repeated int32 named "list_of_ints_tuple"
// inside a required LIST group; values read directly as list elements.
@Test public void testThriftPrimitiveInList() throws Exception {
Path test=writeDirect("ThriftPrimitiveInList",Types.buildMessage().requiredGroup().as(LIST).repeated(INT32).named("list_of_ints_tuple").named("list_of_ints").named("ThriftPrimitiveInList"),new DirectWriter(){
@Override public void write( RecordConsumer rc){
rc.startMessage();
rc.startField("list_of_ints",0);
rc.startGroup();
rc.startField("list_of_ints_tuple",0);
rc.addInteger(34);
rc.addInteger(35);
rc.addInteger(36);
rc.endField("list_of_ints_tuple",0);
rc.endGroup();
rc.endField("list_of_ints",0);
rc.endMessage();
}
}
);
ArrayWritable expected=list(new IntWritable(34),new IntWritable(35),new IntWritable(36));
List records=read(test);
Assert.assertEquals("Should have only one record",1,records.size());
assertEquals("Should match expected record",expected,records.get(0));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Modern 3-level list layout with an OPTIONAL element: the second repeated
// "list" group is written empty (startGroup/endGroup with no "element"),
// which must be read back as a null list element -- hence the null in the
// middle of the expected ArrayWritable.
@Test public void testNewOptionalGroupInList() throws Exception {
Path test=writeDirect("NewOptionalGroupInList",Types.buildMessage().optionalGroup().as(LIST).repeatedGroup().optionalGroup().required(DOUBLE).named("latitude").required(DOUBLE).named("longitude").named("element").named("list").named("locations").named("NewOptionalGroupInList"),new DirectWriter(){
@Override public void write( RecordConsumer rc){
rc.startMessage();
rc.startField("locations",0);
rc.startGroup();
rc.startField("list",0);
rc.startGroup();
rc.startField("element",0);
rc.startGroup();
rc.startField("latitude",0);
rc.addDouble(0.0);
rc.endField("latitude",0);
rc.startField("longitude",1);
rc.addDouble(0.0);
rc.endField("longitude",1);
rc.endGroup();
rc.endField("element",0);
rc.endGroup();
rc.startGroup();
rc.endGroup();
rc.startGroup();
rc.startField("element",0);
rc.startGroup();
rc.startField("latitude",0);
rc.addDouble(0.0);
rc.endField("latitude",0);
rc.startField("longitude",1);
rc.addDouble(180.0);
rc.endField("longitude",1);
rc.endGroup();
rc.endField("element",0);
rc.endGroup();
rc.endField("list",0);
rc.endGroup();
rc.endField("locations",0);
rc.endMessage();
}
}
);
ArrayWritable expected=list(record(new DoubleWritable(0.0),new DoubleWritable(0.0)),null,record(new DoubleWritable(0.0),new DoubleWritable(180.0)));
List records=read(test);
Assert.assertEquals("Should have only one record",1,records.size());
assertEquals("Should match expected record",expected,records.get(0));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// A bare repeated group (no LIST annotation) of (x, y) float pairs must be
// read as a list of two-field records.
@Test public void testUnannotatedListOfGroups() throws Exception {
Path test=writeDirect("UnannotatedListOfGroups",Types.buildMessage().repeatedGroup().required(FLOAT).named("x").required(FLOAT).named("y").named("list_of_points").named("UnannotatedListOfGroups"),new DirectWriter(){
@Override public void write( RecordConsumer rc){
rc.startMessage();
rc.startField("list_of_points",0);
rc.startGroup();
rc.startField("x",0);
rc.addFloat(1.0f);
rc.endField("x",0);
rc.startField("y",1);
rc.addFloat(1.0f);
rc.endField("y",1);
rc.endGroup();
rc.startGroup();
rc.startField("x",0);
rc.addFloat(2.0f);
rc.endField("x",0);
rc.startField("y",1);
rc.addFloat(2.0f);
rc.endField("y",1);
rc.endGroup();
rc.endField("list_of_points",0);
rc.endMessage();
}
}
);
ArrayWritable expected=list(record(new FloatWritable(1.0f),new FloatWritable(1.0f)),record(new FloatWritable(2.0f),new FloatWritable(2.0f)));
List records=read(test);
Assert.assertEquals("Should have only one record",1,records.size());
assertEquals("Should match expected record",expected,records.get(0));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Modern 3-level list layout with a REQUIRED element group: every repeated
// "list" entry carries an "element", so no nulls appear in the result.
@Test public void testNewRequiredGroupInList() throws Exception {
Path test=writeDirect("NewRequiredGroupInList",Types.buildMessage().optionalGroup().as(LIST).repeatedGroup().requiredGroup().required(DOUBLE).named("latitude").required(DOUBLE).named("longitude").named("element").named("list").named("locations").named("NewRequiredGroupInList"),new DirectWriter(){
@Override public void write( RecordConsumer rc){
rc.startMessage();
rc.startField("locations",0);
rc.startGroup();
rc.startField("list",0);
rc.startGroup();
rc.startField("element",0);
rc.startGroup();
rc.startField("latitude",0);
rc.addDouble(0.0);
rc.endField("latitude",0);
rc.startField("longitude",1);
rc.addDouble(180.0);
rc.endField("longitude",1);
rc.endGroup();
rc.endField("element",0);
rc.endGroup();
rc.startGroup();
rc.startField("element",0);
rc.startGroup();
rc.startField("latitude",0);
rc.addDouble(0.0);
rc.endField("latitude",0);
rc.startField("longitude",1);
rc.addDouble(0.0);
rc.endField("longitude",1);
rc.endGroup();
rc.endField("element",0);
rc.endGroup();
rc.endField("list",0);
rc.endGroup();
rc.endField("locations",0);
rc.endMessage();
}
}
);
ArrayWritable expected=list(record(new DoubleWritable(0.0),new DoubleWritable(180.0)),record(new DoubleWritable(0.0),new DoubleWritable(0.0)));
List records=read(test);
Assert.assertEquals("Should have only one record",1,records.size());
assertEquals("Should match expected record",expected,records.get(0));
}
Class: org.apache.hadoop.hive.ql.io.parquet.TestDataWritableWriter UtilityVerifier EqualityVerifier HybridVerifier
/**
 * Declaring the Hive column as a primitive int while the Parquet file schema
 * expects a group must fail with a malformed-record RuntimeException.
 */
@Test public void testExpectedStructTypeOnRecord() throws Exception {
  String columnNames = "structCol";
  String columnTypes = "int";
  ArrayWritable hiveRecord = createGroup(createInt(1));
  String fileSchema = "message hive_schema {\n" + " optional group structCol {\n" + " optional int32 int;\n"+ " }\n"+ "}\n";
  // Capture the failure message rather than asserting inside a catch block;
  // if no exception is thrown, the final assertEquals fails on null.
  String actualError = null;
  try {
    writeParquetRecord(fileSchema, getParquetWritable(columnNames, columnTypes, hiveRecord));
  } catch (RuntimeException e) {
    actualError = e.getMessage();
  }
  assertEquals("Parquet record is malformed: Invalid data type: expected STRUCT type, but found: PRIMITIVE", actualError);
}
UtilityVerifier EqualityVerifier HybridVerifier
/**
 * Declaring the Hive column as a primitive int while the Parquet file schema
 * expects a MAP group must fail with a malformed-record RuntimeException.
 */
@Test public void testExpectedMapTypeOnRecord() throws Exception {
  String columnNames = "mapCol";
  String columnTypes = "int";
  ArrayWritable hiveRecord = createGroup(createInt(1));
  String fileSchema = "message hive_schema {\n" + " optional group mapCol (MAP) {\n" + " repeated group map (MAP_KEY_VALUE) {\n"+ " required binary key;\n"+ " optional int32 value;\n"+ " }\n"+ " }\n"+ "}\n";
  // Capture the failure message; a missing exception leaves it null and the
  // final assertEquals fails.
  String actualError = null;
  try {
    writeParquetRecord(fileSchema, getParquetWritable(columnNames, columnTypes, hiveRecord));
  } catch (RuntimeException e) {
    actualError = e.getMessage();
  }
  assertEquals("Parquet record is malformed: Invalid data type: expected MAP type, but found: PRIMITIVE", actualError);
}
UtilityVerifier EqualityVerifier HybridVerifier
/**
 * Writing a record whose Hive column type is a primitive (int) into a Parquet
 * LIST-annotated group column must fail with a descriptive error.
 */
@Test public void testExpectedArrayTypeOnRecord() throws Exception {
String columnNames="arrayCol";
String columnTypes="int";
ArrayWritable hiveRecord=createGroup(createInt(1));
String fileSchema="message hive_schema {\n" + " optional group arrayCol (LIST) {\n" + " repeated group bag {\n"+ " optional int32 array_element;\n"+ " }\n"+ " }\n"+ "}\n";
try {
writeParquetRecord(fileSchema,getParquetWritable(columnNames,columnTypes,hiveRecord));
// Give the failure a message (was a bare fail()) consistent with the other tests.
fail("Write should fail: Hive type is primitive but Parquet column is a LIST group");
}
catch ( RuntimeException e) {
assertEquals("Parquet record is malformed: Invalid data type: expected LIST type, but found: PRIMITIVE",e.getMessage());
}
}
Class: org.apache.hadoop.hive.ql.io.parquet.TestHiveSchemaConverter APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * A Hive map column must convert to an optional Parquet group annotated with
 * the MAP OriginalType, wrapping a repeated MAP_KEY_VALUE key/value group.
 */
@Test public void testMapOriginalType() throws Exception {
// Restored Hive type literal: the generic parameters had been garbled away
// (the source read just "map", which is not a parseable Hive type string).
final String hiveColumnTypes="map<string,string>";
final String hiveColumnNames="mapCol";
final List columnNames=createHiveColumnsFrom(hiveColumnNames);
final List columnTypes=createHiveTypeInfoFrom(hiveColumnTypes);
final MessageType messageTypeFound=HiveSchemaConverter.convert(columnNames,columnTypes);
// Exactly one top-level field: the optional MAP group for mapCol.
assertEquals(1,messageTypeFound.getFieldCount());
org.apache.parquet.schema.Type topLevel=messageTypeFound.getFields().get(0);
assertEquals("mapCol",topLevel.getName());
assertEquals(OriginalType.MAP,topLevel.getOriginalType());
assertEquals(Repetition.OPTIONAL,topLevel.getRepetition());
assertEquals(1,topLevel.asGroupType().getFieldCount());
// The inner repeated group carries the MAP_KEY_VALUE annotation.
org.apache.parquet.schema.Type secondLevel=topLevel.asGroupType().getFields().get(0);
assertEquals("map",secondLevel.getName());
assertEquals(OriginalType.MAP_KEY_VALUE,secondLevel.getOriginalType());
assertEquals(Repetition.REPEATED,secondLevel.getRepetition());
}
Class: org.apache.hadoop.hive.ql.io.parquet.TestMapStructures APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * map&lt;string,int&gt; with optional values: the "kale" entry is written with
 * a key but no value and must read back as a null map value.
 */
@Test public void testStringMapOptionalPrimitive() throws Exception {
Path test=writeDirect("StringMapOptionalPrimitive",Types.buildMessage().optionalGroup().as(MAP).repeatedGroup().required(BINARY).as(UTF8).named("key").optional(INT32).named("value").named("key_value").named("votes").named("StringMapOptionalPrimitive"),new TestArrayCompatibility.DirectWriter(){
@Override public void write( RecordConsumer rc){
rc.startMessage();
rc.startField("votes",0);
rc.startGroup();
rc.startField("key_value",0);
// entry: lettuce -> 34
rc.startGroup();
rc.startField("key",0);
rc.addBinary(Binary.fromString("lettuce"));
rc.endField("key",0);
rc.startField("value",1);
rc.addInteger(34);
rc.endField("value",1);
rc.endGroup();
// entry: kale -> (value omitted; the value field is optional)
rc.startGroup();
rc.startField("key",0);
rc.addBinary(Binary.fromString("kale"));
rc.endField("key",0);
rc.endGroup();
// entry: cabbage -> 18
rc.startGroup();
rc.startField("key",0);
rc.addBinary(Binary.fromString("cabbage"));
rc.endField("key",0);
rc.startField("value",1);
rc.addInteger(18);
rc.endField("value",1);
rc.endGroup();
rc.endField("key_value",0);
rc.endGroup();
rc.endField("votes",0);
rc.endMessage();
}
}
);
ArrayWritable expected=list(record(new Text("lettuce"),new IntWritable(34)),record(new Text("kale"),null),record(new Text("cabbage"),new IntWritable(18)));
List<ArrayWritable> records=read(test);
Assert.assertEquals("Should have only one record",1,records.size());
assertEquals("Should match expected record",expected,records.get(0));
// Restored Hive type literal: generic args had been garbled away ("map").
deserialize(records.get(0),Arrays.asList("votes"),Arrays.asList("map<string,int>"));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * map&lt;string,map&lt;string,int&gt;&gt;: outer keys "a" and "b" map to inner
 * maps {b=1} and {a=-1, b=-2} respectively.
 */
@Test public void testNestedMap() throws Exception {
// NOTE(review): the directory name "DoubleMapWithStructValue" does not match
// the message name "NestedMap" — looks like a copy-paste leftover; confirm.
Path test=writeDirect("DoubleMapWithStructValue",Types.buildMessage().optionalGroup().as(MAP).repeatedGroup().optional(BINARY).as(UTF8).named("key").optionalGroup().as(MAP).repeatedGroup().optional(BINARY).as(UTF8).named("key").required(INT32).named("value").named("key_value").named("value").named("key_value").named("map_of_maps").named("NestedMap"),new TestArrayCompatibility.DirectWriter(){
@Override public void write( RecordConsumer rc){
rc.startMessage();
rc.startField("map_of_maps",0);
rc.startGroup();
rc.startField("key_value",0);
// outer entry: "a" -> {b=1}
rc.startGroup();
rc.startField("key",0);
rc.addBinary(Binary.fromString("a"));
rc.endField("key",0);
rc.startField("value",1);
rc.startGroup();
rc.startField("key_value",0);
rc.startGroup();
rc.startField("key",0);
rc.addBinary(Binary.fromString("b"));
rc.endField("key",0);
rc.startField("value",1);
rc.addInteger(1);
rc.endField("value",1);
rc.endGroup();
rc.endField("key_value",0);
rc.endGroup();
rc.endField("value",1);
rc.endGroup();
// outer entry: "b" -> {a=-1, b=-2}
rc.startGroup();
rc.startField("key",0);
rc.addBinary(Binary.fromString("b"));
rc.endField("key",0);
rc.startField("value",1);
rc.startGroup();
rc.startField("key_value",0);
rc.startGroup();
rc.startField("key",0);
rc.addBinary(Binary.fromString("a"));
rc.endField("key",0);
rc.startField("value",1);
rc.addInteger(-1);
rc.endField("value",1);
rc.endGroup();
rc.startGroup();
rc.startField("key",0);
rc.addBinary(Binary.fromString("b"));
rc.endField("key",0);
rc.startField("value",1);
rc.addInteger(-2);
rc.endField("value",1);
rc.endGroup();
rc.endField("key_value",0);
rc.endGroup();
rc.endField("value",1);
rc.endGroup();
rc.endField("key_value",0);
rc.endGroup();
rc.endField("map_of_maps",0);
rc.endMessage();
}
}
);
ArrayWritable expected=list(record(new Text("a"),record(record(new Text("b"),new IntWritable(1)))),record(new Text("b"),record(record(new Text("a"),new IntWritable(-1)),record(new Text("b"),new IntWritable(-2)))));
List<ArrayWritable> records=read(test);
Assert.assertEquals("Should have only one record",1,records.size());
assertEquals("Should match expected record",expected,records.get(0));
// Restored Hive type literal: generic args had been garbled away ("map>").
deserialize(records.get(0),Arrays.asList("map_of_maps"),Arrays.asList("map<string,map<string,int>>"));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * map&lt;string,array&lt;string&gt;&gt;: "green" maps to ["lettuce","kale",null]
 * (the third list group is empty, yielding a null element) and "brown" has no
 * value at all, yielding a null list.
 */
@Test public void testStringMapOfOptionalArray() throws Exception {
Path test=writeDirect("StringMapOfOptionalArray",Types.buildMessage().optionalGroup().as(MAP).repeatedGroup().required(BINARY).as(UTF8).named("key").optionalGroup().as(LIST).repeatedGroup().optional(BINARY).as(UTF8).named("element").named("list").named("value").named("key_value").named("examples").named("StringMapOfOptionalArray"),new TestArrayCompatibility.DirectWriter(){
@Override public void write( RecordConsumer rc){
rc.startMessage();
rc.startField("examples",0);
rc.startGroup();
rc.startField("key_value",0);
// entry: "green" -> ["lettuce", "kale", null]
rc.startGroup();
rc.startField("key",0);
rc.addBinary(Binary.fromString("green"));
rc.endField("key",0);
rc.startField("value",1);
rc.startGroup();
rc.startField("list",0);
rc.startGroup();
rc.startField("element",0);
rc.addBinary(Binary.fromString("lettuce"));
rc.endField("element",0);
rc.endGroup();
rc.startGroup();
rc.startField("element",0);
rc.addBinary(Binary.fromString("kale"));
rc.endField("element",0);
rc.endGroup();
// empty list group: the optional element is absent -> null entry
rc.startGroup();
rc.endGroup();
rc.endField("list",0);
rc.endGroup();
rc.endField("value",1);
rc.endGroup();
// entry: "brown" -> (no value written; optional)
rc.startGroup();
rc.startField("key",0);
rc.addBinary(Binary.fromString("brown"));
rc.endField("key",0);
rc.endGroup();
rc.endField("key_value",0);
rc.endGroup();
rc.endField("examples",0);
rc.endMessage();
}
}
);
ArrayWritable expected=list(record(new Text("green"),record(new Text("lettuce"),new Text("kale"),null)),record(new Text("brown"),null));
List<ArrayWritable> records=read(test);
Assert.assertEquals("Should have only one record",1,records.size());
assertEquals("Should match expected record",expected,records.get(0));
// Restored Hive type literal: generic args had been garbled away ("map>").
deserialize(records.get(0),Arrays.asList("examples"),Arrays.asList("map<string,array<string>>"));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * map&lt;string,int&gt; with required values: every entry carries both key and
 * value, so the read-back record mirrors the written pairs exactly.
 */
@Test public void testStringMapRequiredPrimitive() throws Exception {
Path test=writeDirect("StringMapRequiredPrimitive",Types.buildMessage().optionalGroup().as(MAP).repeatedGroup().required(BINARY).as(UTF8).named("key").required(INT32).named("value").named("key_value").named("votes").named("StringMapRequiredPrimitive"),new TestArrayCompatibility.DirectWriter(){
@Override public void write( RecordConsumer rc){
rc.startMessage();
rc.startField("votes",0);
rc.startGroup();
rc.startField("key_value",0);
// entry: lettuce -> 34
rc.startGroup();
rc.startField("key",0);
rc.addBinary(Binary.fromString("lettuce"));
rc.endField("key",0);
rc.startField("value",1);
rc.addInteger(34);
rc.endField("value",1);
rc.endGroup();
// entry: cabbage -> 18
rc.startGroup();
rc.startField("key",0);
rc.addBinary(Binary.fromString("cabbage"));
rc.endField("key",0);
rc.startField("value",1);
rc.addInteger(18);
rc.endField("value",1);
rc.endGroup();
rc.endField("key_value",0);
rc.endGroup();
rc.endField("votes",0);
rc.endMessage();
}
}
);
ArrayWritable expected=list(record(new Text("lettuce"),new IntWritable(34)),record(new Text("cabbage"),new IntWritable(18)));
List<ArrayWritable> records=read(test);
Assert.assertEquals("Should have only one record",1,records.size());
assertEquals("Should match expected record",expected,records.get(0));
// Restored Hive type literal: generic args had been garbled away ("map").
deserialize(records.get(0),Arrays.asList("votes"),Arrays.asList("map<string,int>"));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * map&lt;double,struct&lt;x:int,y:int&gt;&gt;: a single entry keyed by 3.14
 * whose value is the struct {x=7, y=22}.
 */
@Test public void testDoubleMapWithStructValue() throws Exception {
Path test=writeDirect("DoubleMapWithStructValue",Types.buildMessage().optionalGroup().as(MAP).repeatedGroup().optional(DOUBLE).named("key").optionalGroup().required(INT32).named("x").required(INT32).named("y").named("value").named("key_value").named("approx").named("DoubleMapWithStructValue"),new TestArrayCompatibility.DirectWriter(){
@Override public void write( RecordConsumer rc){
rc.startMessage();
rc.startField("approx",0);
rc.startGroup();
rc.startField("key_value",0);
// entry: 3.14 -> {x=7, y=22}
rc.startGroup();
rc.startField("key",0);
rc.addDouble(3.14);
rc.endField("key",0);
rc.startField("value",1);
rc.startGroup();
rc.startField("x",0);
rc.addInteger(7);
rc.endField("x",0);
rc.startField("y",1);
rc.addInteger(22);
rc.endField("y",1);
rc.endGroup();
rc.endField("value",1);
rc.endGroup();
rc.endField("key_value",0);
rc.endGroup();
rc.endField("approx",0);
rc.endMessage();
}
}
);
ArrayWritable expected=list(record(new DoubleWritable(3.14),record(new IntWritable(7),new IntWritable(22))));
List<ArrayWritable> records=read(test);
Assert.assertEquals("Should have only one record",1,records.size());
assertEquals("Should match expected record",expected,records.get(0));
// Restored Hive type literal: generic args had been garbled away ("map>").
deserialize(records.get(0),Arrays.asList("approx"),Arrays.asList("map<double,struct<x:int,y:int>>"));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * map&lt;string,array&lt;int&gt;&gt;: "low" maps to [34, 35, null] (last list
 * group written empty) and "high" maps to [340, 360].
 */
@Test public void testStringMapOfOptionalIntArray() throws Exception {
Path test=writeDirect("StringMapOfOptionalIntArray",Types.buildMessage().optionalGroup().as(MAP).repeatedGroup().required(BINARY).as(UTF8).named("key").optionalGroup().as(LIST).repeatedGroup().optional(INT32).named("element").named("list").named("value").named("key_value").named("examples").named("StringMapOfOptionalIntArray"),new TestArrayCompatibility.DirectWriter(){
@Override public void write( RecordConsumer rc){
rc.startMessage();
rc.startField("examples",0);
rc.startGroup();
rc.startField("key_value",0);
// entry: "low" -> [34, 35, null]
rc.startGroup();
rc.startField("key",0);
rc.addBinary(Binary.fromString("low"));
rc.endField("key",0);
rc.startField("value",1);
rc.startGroup();
rc.startField("list",0);
rc.startGroup();
rc.startField("element",0);
rc.addInteger(34);
rc.endField("element",0);
rc.endGroup();
rc.startGroup();
rc.startField("element",0);
rc.addInteger(35);
rc.endField("element",0);
rc.endGroup();
// empty list group: the optional element is absent -> null entry
rc.startGroup();
rc.endGroup();
rc.endField("list",0);
rc.endGroup();
rc.endField("value",1);
rc.endGroup();
// entry: "high" -> [340, 360]
rc.startGroup();
rc.startField("key",0);
rc.addBinary(Binary.fromString("high"));
rc.endField("key",0);
rc.startField("value",1);
rc.startGroup();
rc.startField("list",0);
rc.startGroup();
rc.startField("element",0);
rc.addInteger(340);
rc.endField("element",0);
rc.endGroup();
rc.startGroup();
rc.startField("element",0);
rc.addInteger(360);
rc.endField("element",0);
rc.endGroup();
rc.endField("list",0);
rc.endGroup();
rc.endField("value",1);
rc.endGroup();
rc.endField("key_value",0);
rc.endGroup();
rc.endField("examples",0);
rc.endMessage();
}
}
);
ArrayWritable expected=list(record(new Text("low"),record(new IntWritable(34),new IntWritable(35),null)),record(new Text("high"),record(new IntWritable(340),new IntWritable(360))));
List<ArrayWritable> records=read(test);
Assert.assertEquals("Should have only one record",1,records.size());
assertEquals("Should match expected record",expected,records.get(0));
// Restored Hive type literal: generic args had been garbled away ("map>").
deserialize(records.get(0),Arrays.asList("examples"),Arrays.asList("map<string,array<int>>"));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Map keyed by a struct: {x=7, y=22} -> 3.14. Exercises complex (group) map
 * keys rather than the usual primitive keys.
 */
@Test public void testMapWithComplexKey() throws Exception {
Path test=writeDirect("MapWithComplexKey",Types.buildMessage().optionalGroup().as(MAP).repeatedGroup().requiredGroup().required(INT32).named("x").required(INT32).named("y").named("key").optional(DOUBLE).named("value").named("key_value").named("matrix").named("MapWithComplexKey"),new TestArrayCompatibility.DirectWriter(){
@Override public void write( RecordConsumer rc){
rc.startMessage();
rc.startField("matrix",0);
rc.startGroup();
rc.startField("key_value",0);
// entry: {x=7, y=22} -> 3.14
rc.startGroup();
rc.startField("key",0);
rc.startGroup();
rc.startField("x",0);
rc.addInteger(7);
rc.endField("x",0);
rc.startField("y",1);
rc.addInteger(22);
rc.endField("y",1);
rc.endGroup();
rc.endField("key",0);
rc.startField("value",1);
rc.addDouble(3.14);
rc.endField("value",1);
rc.endGroup();
rc.endField("key_value",0);
rc.endGroup();
rc.endField("matrix",0);
rc.endMessage();
}
}
);
ArrayWritable expected=list(record(record(new IntWritable(7),new IntWritable(22)),new DoubleWritable(3.14)));
List<ArrayWritable> records=read(test);
Assert.assertEquals("Should have only one record",1,records.size());
assertEquals("Should match expected record",expected,records.get(0));
// Restored Hive type literal: generic args had been garbled away ("map,bigint>").
// NOTE(review): the value is written as a double yet the declared value type is
// bigint; this matches the reconstructed literal — confirm against upstream.
deserialize(records.get(0),Arrays.asList("matrix"),Arrays.asList("map<struct<x:int,y:int>,bigint>"));
}
Class: org.apache.hadoop.hive.ql.io.parquet.TestMapredParquetOutputFormat UtilityVerifier EqualityVerifier NullVerifier HybridVerifier
// Verifies that getHiveRecordWriter forwards the wrapped output format, the
// final output path, and the progressable unchanged into
// getParquerRecordWriterWrapper, and that the Parquet Hive schema has been
// derived from the table properties by that point. The override throws a
// sentinel RuntimeException once all arguments have been checked.
@SuppressWarnings("unchecked") @Test public void testGetHiveRecordWriter() throws IOException {
Properties tableProps=new Properties();
tableProps.setProperty("columns","foo,bar");
tableProps.setProperty("columns.types","int:int");
final Progressable mockProgress=mock(Progressable.class);
// NOTE(review): raw ParquetOutputFormat — the generic parameter appears to
// have been lost in extraction; confirm against the upstream source.
final ParquetOutputFormat outputFormat=(ParquetOutputFormat)mock(ParquetOutputFormat.class);
JobConf jobConf=new JobConf();
try {
new MapredParquetOutputFormat(outputFormat){
@Override protected ParquetRecordWriterWrapper getParquerRecordWriterWrapper( ParquetOutputFormat realOutputFormat, JobConf jobConf, String finalOutPath, Progressable progress, Properties tableProperties) throws IOException {
assertEquals(outputFormat,realOutputFormat);
// The schema must have been computed from "columns"/"columns.types" above.
assertNotNull(jobConf.get(DataWritableWriteSupport.PARQUET_HIVE_SCHEMA));
assertEquals("/foo",finalOutPath.toString());
assertEquals(mockProgress,progress);
// Sentinel: proves this override actually ran.
throw new RuntimeException("passed tests");
}
}
.getHiveRecordWriter(jobConf,new Path("/foo"),null,false,tableProps,mockProgress);
fail("should throw runtime exception.");
}
catch ( RuntimeException e) {
assertEquals("passed tests",e.getMessage());
}
}
UtilityVerifier EqualityVerifier HybridVerifier
/**
 * getRecordWriter is unsupported on MapredParquetOutputFormat and must always
 * throw with the fixed "Should never be used" message.
 */
@Test public void testGetRecordWriterThrowsException(){
final MapredParquetOutputFormat format=new MapredParquetOutputFormat();
Exception thrown=null;
try {
format.getRecordWriter(null,null,null,null);
}
catch ( Exception e) {
thrown=e;
}
if (thrown == null) {
fail("should throw runtime exception.");
}
assertEquals("Should never be used",thrown.getMessage());
}
Class: org.apache.hadoop.hive.ql.io.parquet.TestParquetRecordReaderWrapper APIUtilityVerifier EqualityVerifier
// Same conversion checks as testBuilderComplexTypes but with a post-epoch DATE
// literal. Per the expected strings, only predicates on supported leaf types
// survive: the first sarg reduces to the single lteq on y, and in the second
// the DECIMAL between-clause on y is dropped entirely.
@Test public void testBuilderComplexTypes2() throws Exception {
SearchArgument sarg=SearchArgumentFactory.newBuilder().startAnd().lessThan("x",PredicateLeaf.Type.DATE,Date.valueOf("2005-3-12")).lessThanEquals("y",PredicateLeaf.Type.STRING,new HiveChar("hi",10).toString()).equals("z",PredicateLeaf.Type.DECIMAL,new HiveDecimalWritable("1.0")).end().build();
MessageType schema=MessageTypeParser.parseMessageType("message test {" + " required int32 x; required binary y; required binary z;}");
// Only the string predicate remains; note the HiveChar pads "hi" to width 10.
assertEquals("lteq(y, Binary{\"hi        \"})",ParquetFilterPredicateConverter.toFilterPredicate(sarg,schema).toString());
sarg=SearchArgumentFactory.newBuilder().startNot().startOr().isNull("x",PredicateLeaf.Type.LONG).between("y",PredicateLeaf.Type.DECIMAL,new HiveDecimalWritable("10"),new HiveDecimalWritable("20.0")).in("z",PredicateLeaf.Type.LONG,1L,2L,3L).nullSafeEquals("a",PredicateLeaf.Type.STRING,new HiveVarchar("stinger",100).toString()).end().end().build();
schema=MessageTypeParser.parseMessageType("message test {" + " optional int32 x; required binary y; required int32 z;" + " optional binary a;}");
FilterPredicate p=ParquetFilterPredicateConverter.toFilterPredicate(sarg,schema);
String expected="and(and(not(eq(x, null)), not(or(or(eq(z, 1), eq(z, 2)), eq(z, 3)))), " + "not(eq(a, Binary{\"stinger\"})))";
assertEquals(expected,p.toString());
}
APIUtilityVerifier EqualityVerifier
// Conversion of complex leaf types (DATE, padded CHAR string, DECIMAL). Per
// the expected strings, unsupported DATE and DECIMAL leaves are dropped: the
// first sarg reduces to the single lteq on y, and in the second the DECIMAL
// between-clause on y disappears from the result.
@Test public void testBuilderComplexTypes() throws Exception {
SearchArgument sarg=SearchArgumentFactory.newBuilder().startAnd().lessThan("x",PredicateLeaf.Type.DATE,Date.valueOf("1970-1-11")).lessThanEquals("y",PredicateLeaf.Type.STRING,new HiveChar("hi",10).toString()).equals("z",PredicateLeaf.Type.DECIMAL,new HiveDecimalWritable("1.0")).end().build();
MessageType schema=MessageTypeParser.parseMessageType("message test {" + " required int32 x; required binary y; required binary z;}");
// Only the string predicate remains; note the HiveChar pads "hi" to width 10.
assertEquals("lteq(y, Binary{\"hi        \"})",ParquetFilterPredicateConverter.toFilterPredicate(sarg,schema).toString());
sarg=SearchArgumentFactory.newBuilder().startNot().startOr().isNull("x",PredicateLeaf.Type.LONG).between("y",PredicateLeaf.Type.DECIMAL,new HiveDecimalWritable("10"),new HiveDecimalWritable("20.0")).in("z",PredicateLeaf.Type.LONG,1L,2L,3L).nullSafeEquals("a",PredicateLeaf.Type.STRING,new HiveVarchar("stinger",100).toString()).end().end().build();
schema=MessageTypeParser.parseMessageType("message test {" + " optional int32 x; required binary y; required int32 z;" + " optional binary a;}");
FilterPredicate p=ParquetFilterPredicateConverter.toFilterPredicate(sarg,schema);
String expected="and(and(not(eq(x, null)), not(or(or(eq(z, 1), eq(z, 2)), eq(z, 3)))), " + "not(eq(a, Binary{\"stinger\"})))";
assertEquals(expected,p.toString());
}
APIUtilityVerifier EqualityVerifier
// All-long/string leaves convert fully: BETWEEN becomes a lt/lteq range pair,
// IN becomes a chain of ORed equalities, and the outer NOT(OR(...)) pushes a
// not(...) around each converted leaf, matching the expected string exactly.
@Test public void testBuilder() throws Exception {
SearchArgument sarg=SearchArgumentFactory.newBuilder().startNot().startOr().isNull("x",PredicateLeaf.Type.LONG).between("y",PredicateLeaf.Type.LONG,10L,20L).in("z",PredicateLeaf.Type.LONG,1L,2L,3L).nullSafeEquals("a",PredicateLeaf.Type.STRING,"stinger").end().end().build();
MessageType schema=MessageTypeParser.parseMessageType("message test {" + " optional int32 x; required int32 y; required int32 z;" + " optional binary a;}");
FilterPredicate p=ParquetFilterPredicateConverter.toFilterPredicate(sarg,schema);
String expected="and(and(and(not(eq(x, null)), not(and(lt(y, 20), not(lteq(y, 10))))), not(or(or(eq(z, 1), " + "eq(z, 2)), eq(z, 3)))), not(eq(a, Binary{\"stinger\"})))";
assertEquals(expected,p.toString());
}
APIUtilityVerifier EqualityVerifier
/**
 * FLOAT leaves are supported by the converter: every leaf of the sarg,
 * including both float equality checks, appears in the resulting predicate.
 */
@Test public void testBuilderFloat() throws Exception {
// Double.valueOf replaces the deprecated new Double(...) boxing constructor;
// the resulting boxed value is identical.
SearchArgument sarg=SearchArgumentFactory.newBuilder().startAnd().lessThan("x",PredicateLeaf.Type.LONG,22L).lessThan("x1",PredicateLeaf.Type.LONG,22L).lessThanEquals("y",PredicateLeaf.Type.STRING,new HiveChar("hi",10).toString()).equals("z",PredicateLeaf.Type.FLOAT,Double.valueOf(0.22)).equals("z1",PredicateLeaf.Type.FLOAT,Double.valueOf(0.22)).end().build();
MessageType schema=MessageTypeParser.parseMessageType("message test {" + " required int32 x; required int32 x1;" + " required binary y; required float z; required float z1;}");
FilterPredicate p=ParquetFilterPredicateConverter.toFilterPredicate(sarg,schema);
String expected="and(and(and(and(lt(x, 22), lt(x1, 22))," + " lteq(y, Binary{\"hi        \"})), eq(z, " + "0.22)), eq(z1, 0.22))";
assertEquals(expected,p.toString());
}
Class: org.apache.hadoop.hive.ql.io.parquet.TestParquetRowGroupFilter APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Predicate push-down on intCol over a file of values 0..99: with "intCol > 50"
 * getFiltedBlocks() reports 1 row group, with "intCol > 100" it reports 0.
 */
@Test public void testRowGroupFilterTakeEffect() throws Exception {
columnNames="intCol";
columnTypes="int";
StructObjectInspector inspector=getObjectInspector(columnNames,columnTypes);
MessageType fileSchema=MessageTypeParser.parseMessageType("message hive_schema {\n" + " optional int32 intCol;\n" + "}\n");
conf.set(ColumnProjectionUtils.READ_COLUMN_NAMES_CONF_STR,"intCol");
conf.set("columns","intCol");
conf.set("columns.types","int");
// Write 100 rows with values 0..99 through the low-level record consumer.
Path testPath=writeDirect("RowGroupFilterTakeEffect",fileSchema,new DirectWriter(){
@Override public void write( RecordConsumer consumer){
for (int i=0; i < 100; i++) {
consumer.startMessage();
// NOTE(review): field written as "int" while the schema names it "intCol";
// the field index 0 is what lines them up — confirm this is intentional.
consumer.startField("int",0);
consumer.addInteger(i);
consumer.endField("int",0);
consumer.endMessage();
}
}
}
);
// Build "intCol > 50" as an expression tree and push it into the job conf.
GenericUDF udf=new GenericUDFOPGreaterThan();
List children=Lists.newArrayList();
ExprNodeColumnDesc columnDesc=new ExprNodeColumnDesc(Integer.class,"intCol","T",false);
ExprNodeConstantDesc constantDesc=new ExprNodeConstantDesc(50);
children.add(columnDesc);
children.add(constantDesc);
ExprNodeGenericFuncDesc genericFuncDesc=new ExprNodeGenericFuncDesc(inspector,udf,children);
String searchArgumentStr=SerializationUtilities.serializeExpression(genericFuncDesc);
conf.set(TableScanDesc.FILTER_EXPR_CONF_STR,searchArgumentStr);
ParquetRecordReaderWrapper recordReader=(ParquetRecordReaderWrapper)new MapredParquetInputFormat().getRecordReader(new FileSplit(testPath,0,fileLength(testPath),(String[])null),conf,null);
Assert.assertEquals("row group is not filtered correctly",1,recordReader.getFiltedBlocks().size());
// Close the first reader before opening a second one (was leaked before).
recordReader.close();
// "intCol > 100" matches no value, so no row group should survive.
constantDesc=new ExprNodeConstantDesc(100);
children.set(1,constantDesc);
genericFuncDesc=new ExprNodeGenericFuncDesc(inspector,udf,children);
searchArgumentStr=SerializationUtilities.serializeExpression(genericFuncDesc);
conf.set(TableScanDesc.FILTER_EXPR_CONF_STR,searchArgumentStr);
recordReader=(ParquetRecordReaderWrapper)new MapredParquetInputFormat().getRecordReader(new FileSplit(testPath,0,fileLength(testPath),(String[])null),conf,null);
Assert.assertEquals("row group is not filtered correctly",0,recordReader.getFiltedBlocks().size());
recordReader.close();
}
Class: org.apache.hadoop.hive.ql.io.parquet.read.TestParquetFilterPredicate APIUtilityVerifier EqualityVerifier
// Predicate leaves referencing columns missing from the Parquet schema ("y",
// "z") must be dropped during conversion; per the expected string, only the
// two predicates on the existing column "a" survive.
@Test public void testFilterColumnsThatDoNoExistOnSchema(){
MessageType schema=MessageTypeParser.parseMessageType("message test { required int32 a; required binary stinger; }");
SearchArgument sarg=SearchArgumentFactory.newBuilder().startNot().startOr().isNull("a",PredicateLeaf.Type.LONG).between("y",PredicateLeaf.Type.LONG,10L,20L).in("z",PredicateLeaf.Type.LONG,1L,2L,3L).nullSafeEquals("a",PredicateLeaf.Type.STRING,"stinger").end().end().build();
FilterPredicate p=ParquetFilterPredicateConverter.toFilterPredicate(sarg,schema);
String expected="and(not(eq(a, null)), not(eq(a, Binary{\"stinger\"})))";
assertEquals(expected,p.toString());
}
APIUtilityVerifier EqualityVerifier
// FLOAT-typed leaves (isNull and between on column a) convert alongside LONG
// leaves; the expected string shows the between-clause expanded into a
// lt/lteq range pair on the float column.
@Test public void testFilterFloatColumns(){
MessageType schema=MessageTypeParser.parseMessageType("message test { required float a; required int32 b; }");
SearchArgument sarg=SearchArgumentFactory.newBuilder().startNot().startOr().isNull("a",PredicateLeaf.Type.FLOAT).between("a",PredicateLeaf.Type.FLOAT,10.2,20.3).in("b",PredicateLeaf.Type.LONG,1L,2L,3L).end().end().build();
FilterPredicate p=ParquetFilterPredicateConverter.toFilterPredicate(sarg,schema);
String expected="and(and(not(eq(a, null)), not(and(lt(a, 20.3), not(lteq(a, 10.2))))), not(or(or(eq(b, 1), eq(b, 2)), eq(b, 3))))";
assertEquals(expected,p.toString());
}
Class: org.apache.hadoop.hive.ql.io.parquet.serde.TestAbstractParquetMapInspector InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// An ArrayWritable with zero entries is treated as "no map" by the inspector:
// the size sentinel is -1 and the extracted map is null.
@Test public void testEmptyContainer(){
final ArrayWritable map=new ArrayWritable(ArrayWritable.class,new ArrayWritable[0]);
assertEquals("Wrong size",-1,inspector.getMapSize(map));
assertNull("Should be null",inspector.getMap(map));
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// A null backing object yields the size sentinel -1 and a null map.
@Test public void testNullMap(){
assertEquals("Wrong size",-1,inspector.getMapSize(null));
assertNull("Should be null",inspector.getMap(null));
}
InternalCallVerifier EqualityVerifier
/**
 * A plain java.util.Map handed to the inspector is sized and returned as-is.
 */
@Test public void testHashMap(){
final Map backing=new HashMap();
// Four entries: 0->1, 2->3, 4->5, 6->7.
for (int key=0; key <= 6; key+=2) {
backing.put(new IntWritable(key),new IntWritable(key + 1));
}
assertEquals("Wrong size",4,inspector.getMapSize(backing));
assertEquals("Wrong result of inspection",backing,inspector.getMap(backing));
}
InternalCallVerifier EqualityVerifier
/**
 * A Parquet-shaped map (ArrayWritable of two-element key/value ArrayWritables)
 * is sized and converted into an equivalent java.util.Map.
 */
@Test public void testRegularMap(){
final ArrayWritable map=new ArrayWritable(ArrayWritable.class,new Writable[]{new ArrayWritable(Writable.class,new Writable[]{new IntWritable(0),new IntWritable(1)}),new ArrayWritable(Writable.class,new Writable[]{new IntWritable(2),new IntWritable(3)})});
// Expected logical contents: 0->1, 2->3.
final Map expected=new HashMap();
for (int key=0; key <= 2; key+=2) {
expected.put(new IntWritable(key),new IntWritable(key + 1));
}
assertEquals("Wrong size",2,inspector.getMapSize(map));
assertEquals("Wrong result of inspection",expected,inspector.getMap(map));
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// An ArrayWritable wrapping a null array behaves like no map at all:
// size sentinel -1 and a null extracted map.
@Test public void testNullContainer(){
final ArrayWritable map=new ArrayWritable(ArrayWritable.class,null);
assertEquals("Wrong size",-1,inspector.getMapSize(map));
assertNull("Should be null",inspector.getMap(map));
}
Class: org.apache.hadoop.hive.ql.io.parquet.serde.TestDeepParquetHiveMapInspector InternalCallVerifier EqualityVerifier
/**
 * The deep inspector resolves map values for both exact-typed (IntWritable)
 * keys and convertible (ShortWritable) keys against a java.util.Map.
 */
@Test public void testHashMap(){
final Map map=new HashMap();
// Entries: 0->1, 2->3, 4->5, 6->7.
for (int key=0; key <= 6; key+=2) {
map.put(new IntWritable(key),new IntWritable(key + 1));
}
for (int key=0; key <= 6; key+=2) {
final IntWritable value=new IntWritable(key + 1);
assertEquals("Wrong result of inspection",value,inspector.getMapValueElement(map,new IntWritable(key)));
assertEquals("Wrong result of inspection",value,inspector.getMapValueElement(map,new ShortWritable((short)key)));
}
}
InternalCallVerifier EqualityVerifier
/**
 * The deep inspector resolves values from a Parquet-shaped map for both
 * exact-typed (IntWritable) and convertible (ShortWritable) keys.
 */
@Test public void testRegularMap(){
final ArrayWritable map=new ArrayWritable(ArrayWritable.class,new Writable[]{new ArrayWritable(Writable.class,new Writable[]{new IntWritable(0),new IntWritable(1)}),new ArrayWritable(Writable.class,new Writable[]{new IntWritable(2),new IntWritable(3)})});
for (int key=0; key <= 2; key+=2) {
final IntWritable value=new IntWritable(key + 1);
assertEquals("Wrong result of inspection",value,inspector.getMapValueElement(map,new IntWritable(key)));
assertEquals("Wrong result of inspection",value,inspector.getMapValueElement(map,new ShortWritable((short)key)));
}
}
Class: org.apache.hadoop.hive.ql.io.parquet.serde.TestParquetHiveArrayInspector InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// An ArrayWritable with zero entries is treated as "no list": length sentinel
// -1, null list, and null for any element lookup.
@Test public void testEmptyContainer(){
final ArrayWritable list=new ArrayWritable(ArrayWritable.class,new ArrayWritable[0]);
assertEquals("Wrong size",-1,inspector.getListLength(list));
assertNull("Should be null",inspector.getList(list));
assertNull("Should be null",inspector.getListElement(list,0));
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// A null backing object yields the length sentinel -1, a null list, and null
// for any element lookup.
@Test public void testNullArray(){
assertEquals("Wrong size",-1,inspector.getListLength(null));
assertNull("Should be null",inspector.getList(null));
assertNull("Should be null",inspector.getListElement(null,0));
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// An ArrayWritable wrapping a null array behaves like no list at all.
@Test public void testNullContainer(){
final ArrayWritable list=new ArrayWritable(ArrayWritable.class,null);
assertEquals("Wrong size",-1,inspector.getListLength(list));
assertNull("Should be null",inspector.getList(list));
assertNull("Should be null",inspector.getListElement(list,0));
}
IterativeVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * A populated ArrayWritable exposes its length, its full contents as a List,
 * per-index elements, and null for an out-of-range index.
 */
@Test public void testRegularList(){
final Writable[] items=new Writable[]{new IntWritable(3),new IntWritable(5),new IntWritable(1)};
final ArrayWritable list=new ArrayWritable(Writable.class,items);
final List expected=new ArrayList();
for ( Writable item : items) {
expected.add(item);
}
assertEquals("Wrong size",3,inspector.getListLength(list));
assertEquals("Wrong result of inspection",expected,inspector.getList(list));
for (int i=0; i < expected.size(); ++i) {
assertEquals("Wrong result of inspection",expected.get(i),inspector.getListElement(list,i));
}
// One past the end must yield null rather than throwing.
assertNull("Should be null",inspector.getListElement(list,3));
}
Class: org.apache.hadoop.hive.ql.io.parquet.serde.TestStandardParquetHiveMapInspector InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * The standard inspector resolves values only for exact-typed (IntWritable)
 * keys; ShortWritable keys are not converted and resolve to null.
 */
@Test public void testHashMap(){
final Map map=new HashMap();
// Entries: 0->1, 2->3, 4->5, 6->7.
for (int key=0; key <= 6; key+=2) {
map.put(new IntWritable(key),new IntWritable(key + 1));
}
for (int key=0; key <= 6; key+=2) {
assertEquals("Wrong result of inspection",new IntWritable(key + 1),inspector.getMapValueElement(map,new IntWritable(key)));
assertNull("Wrong result of inspection",inspector.getMapValueElement(map,new ShortWritable((short)key)));
}
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * The standard inspector resolves Parquet-shaped map values only for
 * exact-typed (IntWritable) keys; ShortWritable keys resolve to null.
 */
@Test public void testRegularMap(){
final ArrayWritable map=new ArrayWritable(ArrayWritable.class,new Writable[]{new ArrayWritable(Writable.class,new Writable[]{new IntWritable(0),new IntWritable(1)}),new ArrayWritable(Writable.class,new Writable[]{new IntWritable(2),new IntWritable(3)})});
for (int key=0; key <= 2; key+=2) {
assertEquals("Wrong result of inspection",new IntWritable(key + 1),inspector.getMapValueElement(map,new IntWritable(key)));
assertNull("Wrong result of inspection",inspector.getMapValueElement(map,new ShortWritable((short)key)));
}
}
Class: org.apache.hadoop.hive.ql.io.parquet.serde.primitive.TestParquetByteInspector EqualityVerifier
/** get() unwraps the primitive byte from a ByteWritable. */
@Test public void testGet(){
final ByteWritable wrapped=new ByteWritable((byte)15);
assertEquals((byte)15,inspector.get(wrapped));
}
InternalCallVerifier EqualityVerifier
/** A ByteWritable passes through unchanged, as writable and as Java byte. */
@Test public void testByteWritable(){
final ByteWritable wrapped=new ByteWritable((byte)5);
assertEquals((byte)5,inspector.getPrimitiveJavaObject(wrapped));
assertEquals(wrapped,inspector.getPrimitiveWritableObject(wrapped));
}
EqualityVerifier
@Test public void testSet(){
  // set() should store the given byte into the supplied writable and return it.
  final ByteWritable target=new ByteWritable();
  assertEquals(new ByteWritable((byte)12),inspector.set(target,(byte)12));
}
EqualityVerifier
@Test public void testCreate(){
  // create() should produce a fresh ByteWritable wrapping the given primitive.
  assertEquals(new ByteWritable((byte)8),inspector.create((byte)8));
}
InternalCallVerifier EqualityVerifier
@Test public void testIntWritable(){
  // An IntWritable input should be narrowed by the byte inspector to an equivalent byte.
  final IntWritable intValue=new IntWritable(10);
  assertEquals(new ByteWritable((byte)10),inspector.getPrimitiveWritableObject(intValue));
  assertEquals((byte)10,inspector.getPrimitiveJavaObject(intValue));
}
Class: org.apache.hadoop.hive.ql.io.parquet.serde.primitive.TestParquetShortInspector InternalCallVerifier EqualityVerifier
@Test public void testIntWritable(){
  // An IntWritable input should be narrowed by the short inspector to an equivalent short.
  final IntWritable intValue=new IntWritable(10);
  assertEquals(new ShortWritable((short)10),inspector.getPrimitiveWritableObject(intValue));
  assertEquals((short)10,inspector.getPrimitiveJavaObject(intValue));
}
EqualityVerifier
@Test public void testCreate(){
  // create() should produce a fresh ShortWritable wrapping the given primitive.
  assertEquals(new ShortWritable((short)8),inspector.create((short)8));
}
EqualityVerifier
@Test public void testSet(){
  // set() should store the given short into the supplied writable and return it.
  final ShortWritable target=new ShortWritable();
  assertEquals(new ShortWritable((short)12),inspector.set(target,(short)12));
}
EqualityVerifier
@Test public void testGet(){
  // get() should unwrap the primitive short stored in the writable.
  final ShortWritable wrapped=new ShortWritable((short)15);
  assertEquals((short)15,inspector.get(wrapped));
}
InternalCallVerifier EqualityVerifier
@Test public void testShortWritable(){
  // A ShortWritable input passes through as-is in writable form and unwraps to its short value.
  final ShortWritable value=new ShortWritable((short)5);
  assertEquals(value,inspector.getPrimitiveWritableObject(value));
  assertEquals((short)5,inspector.getPrimitiveJavaObject(value));
}
Class: org.apache.hadoop.hive.ql.io.sarg.TestConvertAstToSearchArg APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Converts a serialized equality predicate on a bigint column into a SearchArgument
// and verifies it yields a single LONG-typed leaf — per the expected "(EQUALS bi 12345)".
@Test public void TestBigintSarg() throws Exception {
// Base64 blob: a pre-serialized ExprNodeDesc AST captured from a query plan.
String serialAst="AQEAamF2YS51dGlsLkFycmF5TGlz9AECAQFvcmcuYXBhY2hlLmhhZG9vcC5oaXZlLnFsLnBsYW4uRXh" + "wck5vZGVDb2x1bW5EZXPjAQFi6QAAAWJpZ29y4wECb3JnLmFwYWNoZS5oYWRvb3AuaGl2ZS5zZXJkZT" + "IudHlwZWluZm8uUHJpbWl0aXZlVHlwZUluZu8BAWJpZ2lu9AEDb3JnLmFwYWNoZS5oYWRvb3AuaGl2Z"+ "S5xbC5wbGFuLkV4cHJOb2RlQ29uc3RhbnREZXPjAQECBwnywAEBBG9yZy5hcGFjaGUuaGFkb29wLmhp"+ "dmUucWwudWRmLmdlbmVyaWMuR2VuZXJpY1VERk9QRXF1YewBAAABgj0BRVFVQcwBBW9yZy5hcGFjaGU"+ "uaGFkb29wLmlvLkJvb2xlYW5Xcml0YWJs5QEAAAECAQFib29sZWHu";
SearchArgument sarg=new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst)).buildSearchArgument();
// Whole expression is a single leaf; the bigint literal maps to PredicateLeaf.Type.LONG.
assertEquals("leaf-0",sarg.getExpression().toString());
assertEquals(1,sarg.getLeaves().size());
PredicateLeaf leaf=sarg.getLeaves().get(0);
assertEquals(PredicateLeaf.Type.LONG,leaf.getType());
assertEquals("(EQUALS bi 12345)",leaf.toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Parses a large AND-of-ORs predicate over column `id` (comparisons against 10..18),
// converts it to a SearchArgument with 9 LESS_THAN leaves, and checks both the Parquet
// FilterPredicate translation and the leaf metadata.
// NOTE(review): exprStr appears to be an XML-serialized expression whose markup was
// stripped during extraction — only whitespace and literal values remain; verify against
// the original test source before editing this literal.
@Test public void testExpression7() throws Exception {
String exprStr=" \n" + " \n" + " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " int \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 10 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " boolean \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 11 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 12 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 13 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 14 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 
\n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 15 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 16 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 17 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 18 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " ";
SearchArgumentImpl sarg=(SearchArgumentImpl)ConvertAstToSearchArg.create(getFuncDesc(exprStr));
List leaves=sarg.getLeaves();
assertEquals(9,leaves.size());
// Translate to a Parquet filter over a minimal two-column schema.
MessageType schema=MessageTypeParser.parseMessageType("message test { required int32 id;" + " required binary first_name; }");
FilterPredicate p=ParquetFilterPredicateConverter.toFilterPredicate(sarg,schema);
String expected="and(and(and(and(and(and(and(and(and(and(and(and(and(and(and(and(and(" + "or(or(or(lt(id, 18), lt(id, 10)), lt(id, 13)), lt(id, 16)), " + "or(or(or(lt(id, 18), lt(id, 11)), lt(id, 13)), lt(id, 16))), "+ "or(or(or(lt(id, 18), lt(id, 12)), lt(id, 13)), lt(id, 16))), "+ "or(or(or(lt(id, 18), lt(id, 10)), lt(id, 14)), lt(id, 16))), "+ "or(or(or(lt(id, 18), lt(id, 11)), lt(id, 14)), lt(id, 16))), "+ "or(or(or(lt(id, 18), lt(id, 12)), lt(id, 14)), lt(id, 16))), "+ "or(or(or(lt(id, 18), lt(id, 10)), lt(id, 15)), lt(id, 16))), "+ "or(or(or(lt(id, 18), lt(id, 11)), lt(id, 15)), lt(id, 16))), "+ "or(or(or(lt(id, 18), lt(id, 12)), lt(id, 15)), lt(id, 16))), "+ "or(or(or(lt(id, 18), lt(id, 10)), lt(id, 13)), lt(id, 17))), "+ "or(or(or(lt(id, 18), lt(id, 11)), lt(id, 13)), lt(id, 17))), "+ "or(or(or(lt(id, 18), lt(id, 12)), lt(id, 13)), lt(id, 17))), "+ "or(or(or(lt(id, 18), lt(id, 10)), lt(id, 14)), lt(id, 17))), "+ "or(or(or(lt(id, 18), lt(id, 11)), lt(id, 14)), lt(id, 17))), "+ "or(or(or(lt(id, 18), lt(id, 12)), lt(id, 14)), lt(id, 17))), "+ "or(or(or(lt(id, 18), lt(id, 10)), lt(id, 15)), lt(id, 17))), "+ "or(or(or(lt(id, 18), lt(id, 11)), lt(id, 15)), lt(id, 17))), "+ "or(or(or(lt(id, 18), lt(id, 12)), lt(id, 15)), lt(id, 17)))";
// NOTE(review): arguments are swapped — JUnit convention is assertEquals(expected, actual).
// Pass/fail is unaffected, but a failure message would label the values backwards.
assertEquals(p.toString(),expected);
// Each of the 9 leaves is a LESS_THAN on `id` with literals 18,10,13,16,11,12,14,15,17.
PredicateLeaf leaf=leaves.get(0);
assertEquals(PredicateLeaf.Type.LONG,leaf.getType());
assertEquals(PredicateLeaf.Operator.LESS_THAN,leaf.getOperator());
assertEquals("id",leaf.getColumnName());
assertEquals(18L,leaf.getLiteral());
leaf=leaves.get(1);
assertEquals(PredicateLeaf.Type.LONG,leaf.getType());
assertEquals(PredicateLeaf.Operator.LESS_THAN,leaf.getOperator());
assertEquals("id",leaf.getColumnName());
assertEquals(10L,leaf.getLiteral());
leaf=leaves.get(2);
assertEquals(PredicateLeaf.Type.LONG,leaf.getType());
assertEquals(PredicateLeaf.Operator.LESS_THAN,leaf.getOperator());
assertEquals("id",leaf.getColumnName());
assertEquals(13L,leaf.getLiteral());
leaf=leaves.get(3);
assertEquals(PredicateLeaf.Type.LONG,leaf.getType());
assertEquals(PredicateLeaf.Operator.LESS_THAN,leaf.getOperator());
assertEquals("id",leaf.getColumnName());
assertEquals(16L,leaf.getLiteral());
leaf=leaves.get(4);
assertEquals(PredicateLeaf.Type.LONG,leaf.getType());
assertEquals(PredicateLeaf.Operator.LESS_THAN,leaf.getOperator());
assertEquals("id",leaf.getColumnName());
assertEquals(11L,leaf.getLiteral());
leaf=leaves.get(5);
assertEquals(PredicateLeaf.Type.LONG,leaf.getType());
assertEquals(PredicateLeaf.Operator.LESS_THAN,leaf.getOperator());
assertEquals("id",leaf.getColumnName());
assertEquals(12L,leaf.getLiteral());
leaf=leaves.get(6);
assertEquals(PredicateLeaf.Type.LONG,leaf.getType());
assertEquals(PredicateLeaf.Operator.LESS_THAN,leaf.getOperator());
assertEquals("id",leaf.getColumnName());
assertEquals(14L,leaf.getLiteral());
leaf=leaves.get(7);
assertEquals(PredicateLeaf.Type.LONG,leaf.getType());
assertEquals(PredicateLeaf.Operator.LESS_THAN,leaf.getOperator());
assertEquals("id",leaf.getColumnName());
assertEquals(15L,leaf.getLiteral());
leaf=leaves.get(8);
assertEquals(PredicateLeaf.Type.LONG,leaf.getType());
assertEquals(PredicateLeaf.Operator.LESS_THAN,leaf.getOperator());
assertEquals("id",leaf.getColumnName());
assertEquals(17L,leaf.getLiteral());
// The SARG expression itself: an AND of 18 four-way ORs over the shared leaves.
assertEquals("(and" + " (or leaf-0 leaf-1 leaf-2 leaf-3)" + " (or leaf-0 leaf-4 leaf-2 leaf-3)"+ " (or leaf-0 leaf-5 leaf-2 leaf-3)"+ " (or leaf-0 leaf-1 leaf-6 leaf-3)"+ " (or leaf-0 leaf-4 leaf-6 leaf-3)"+ " (or leaf-0 leaf-5 leaf-6 leaf-3)"+ " (or leaf-0 leaf-1 leaf-7 leaf-3)"+ " (or leaf-0 leaf-4 leaf-7 leaf-3)"+ " (or leaf-0 leaf-5 leaf-7 leaf-3)"+ " (or leaf-0 leaf-1 leaf-2 leaf-8)"+ " (or leaf-0 leaf-4 leaf-2 leaf-8)"+ " (or leaf-0 leaf-5 leaf-2 leaf-8)"+ " (or leaf-0 leaf-1 leaf-6 leaf-8)"+ " (or leaf-0 leaf-4 leaf-6 leaf-8)"+ " (or leaf-0 leaf-5 leaf-6 leaf-8)"+ " (or leaf-0 leaf-1 leaf-7 leaf-8)"+ " (or leaf-0 leaf-4 leaf-7 leaf-8)"+ " (or leaf-0 leaf-5 leaf-7 leaf-8))",sarg.getExpression().toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Converts a serialized equality predicate on a double column into a SearchArgument
// and verifies the single leaf is FLOAT-typed — per the expected "(EQUALS dbl 2.2)".
@Test public void TestDoubleSarg() throws Exception {
// Base64 blob: a pre-serialized ExprNodeDesc AST captured from a query plan.
String serialAst="AQEAamF2YS51dGlsLkFycmF5TGlz9AECAQFvcmcuYXBhY2hlLmhhZG9vcC5oaXZlLnFsLnBsYW4uRXh" + "wck5vZGVDb2x1bW5EZXPjAQFkYuwAAAFiaWdvcuMBAm9yZy5hcGFjaGUuaGFkb29wLmhpdmUuc2VyZG" + "UyLnR5cGVpbmZvLlByaW1pdGl2ZVR5cGVJbmbvAQFkb3VibOUBA29yZy5hcGFjaGUuaGFkb29wLmhpd"+ "mUucWwucGxhbi5FeHByTm9kZUNvbnN0YW50RGVz4wEBAgcKQAGZmZmZmZoBBG9yZy5hcGFjaGUuaGFk"+ "b29wLmhpdmUucWwudWRmLmdlbmVyaWMuR2VuZXJpY1VERk9QRXF1YewBAAABgj0BRVFVQcwBBW9yZy5"+ "hcGFjaGUuaGFkb29wLmlvLkJvb2xlYW5Xcml0YWJs5QEAAAECAQFib29sZWHu";
SearchArgument sarg=new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst)).buildSearchArgument();
assertEquals("leaf-0",sarg.getExpression().toString());
assertEquals(1,sarg.getLeaves().size());
PredicateLeaf leaf=sarg.getLeaves().get(0);
assertEquals(PredicateLeaf.Type.FLOAT,leaf.getType());
assertEquals("(EQUALS dbl 2.2)",leaf.toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Converts a predicate that reduces to (NOT id<10) AND (NOT id<10) — per the expected
// strings below — and verifies the Parquet translation, the single shared leaf's
// metadata, node-sharing invariants, and TruthValue evaluation of the negated AND.
// NOTE(review): exprStr appears to be an XML-serialized expression whose markup was
// stripped during extraction; verify against the original test source before editing.
@Test public void testExpression10() throws Exception {
String exprStr=" \n" + " \n" + " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " int \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 10 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " boolean \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 10 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " ";
SearchArgumentImpl sarg=(SearchArgumentImpl)ConvertAstToSearchArg.create(getFuncDesc(exprStr));
List leaves=sarg.getLeaves();
assertEquals(1,leaves.size());
// Translate to a Parquet filter over a minimal two-column schema.
MessageType schema=MessageTypeParser.parseMessageType("message test { required int32 id;" + " required binary first_name; }");
FilterPredicate p=ParquetFilterPredicateConverter.toFilterPredicate(sarg,schema);
String expected="and(not(lt(id, 10)), not(lt(id, 10)))";
assertEquals(expected,p.toString());
// Single shared leaf: LESS_THAN on id with literal 10.
assertEquals(PredicateLeaf.Type.LONG,leaves.get(0).getType());
assertEquals(PredicateLeaf.Operator.LESS_THAN,leaves.get(0).getOperator());
assertEquals("id",leaves.get(0).getColumnName());
assertEquals(10L,leaves.get(0).getLiteral());
assertEquals("(and (not leaf-0) (not leaf-0))",sarg.getExpression().toString());
// Expression nodes must not be aliased (identity-based check).
assertNoSharedNodes(sarg.getExpression(),Sets.newIdentityHashSet());
// (NOT x) AND (NOT x) under three-valued logic: each input TruthValue maps to its negation.
assertEquals(TruthValue.NO,sarg.evaluate(values(TruthValue.YES)));
assertEquals(TruthValue.YES,sarg.evaluate(values(TruthValue.NO)));
assertEquals(TruthValue.NULL,sarg.evaluate(values(TruthValue.NULL)));
assertEquals(TruthValue.NO_NULL,sarg.evaluate(values(TruthValue.YES_NULL)));
assertEquals(TruthValue.YES_NULL,sarg.evaluate(values(TruthValue.NO_NULL)));
assertEquals(TruthValue.YES_NO,sarg.evaluate(values(TruthValue.YES_NO)));
assertEquals(TruthValue.YES_NO_NULL,sarg.evaluate(values(TruthValue.YES_NO_NULL)));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Converts a predicate combining NOT(id=12), first_name IN ('john','sue'), and
// id IN (34, 50) — per the expected "(and (not leaf-0) leaf-1 leaf-2)" — and checks
// Parquet translation, per-leaf metadata, and TruthValue evaluation of the conjunction.
// NOTE(review): exprStr appears to be an XML-serialized expression whose markup was
// stripped during extraction; verify against the original test source before editing.
@Test public void testExpression4() throws Exception {
String exprStr=" \n" + " \n" + " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " int \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 12 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " boolean \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " first_name \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " string \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " john \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " sue \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 34 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 50 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ "\n";
SearchArgumentImpl sarg=(SearchArgumentImpl)ConvertAstToSearchArg.create(getFuncDesc(exprStr));
List leaves=sarg.getLeaves();
assertEquals(3,leaves.size());
// The three conjuncts as they appear in the Parquet predicate, in order.
String[] conditions=new String[]{"not(eq(id, 12))","or(eq(first_name, Binary{\"john\"}), eq(first_name, Binary{\"sue\"}))","or(eq(id, 34), eq(id, 50))"};
MessageType schema=MessageTypeParser.parseMessageType("message test { required int32 id;" + " required binary first_name; }");
FilterPredicate p=ParquetFilterPredicateConverter.toFilterPredicate(sarg,schema);
String expected=String.format("and(and(%1$s, %2$s), %3$s)",conditions);
assertEquals(expected,p.toString());
// leaf-0: EQUALS on id=12 (negated at the expression level).
PredicateLeaf leaf=leaves.get(0);
assertEquals(PredicateLeaf.Type.LONG,leaf.getType());
assertEquals(PredicateLeaf.Operator.EQUALS,leaf.getOperator());
assertEquals("id",leaf.getColumnName());
assertEquals(12L,leaf.getLiteral());
// leaf-1: IN over two string literals on first_name.
leaf=leaves.get(1);
assertEquals(PredicateLeaf.Type.STRING,leaf.getType());
assertEquals(PredicateLeaf.Operator.IN,leaf.getOperator());
assertEquals("first_name",leaf.getColumnName());
assertEquals("john",leaf.getLiteralList().get(0));
assertEquals("sue",leaf.getLiteralList().get(1));
// leaf-2: IN over two long literals on id.
leaf=leaves.get(2);
assertEquals(PredicateLeaf.Type.LONG,leaf.getType());
assertEquals(PredicateLeaf.Operator.IN,leaf.getOperator());
assertEquals("id",leaf.getColumnName());
assertEquals(34L,leaf.getLiteralList().get(0));
assertEquals(50L,leaf.getLiteralList().get(1));
assertEquals("(and (not leaf-0) leaf-1 leaf-2)",sarg.getExpression().toString());
assertNoSharedNodes(sarg.getExpression(),Sets.newIdentityHashSet());
// Three-valued-logic evaluation of (NOT l0) AND l1 AND l2 across input combinations.
assertEquals(TruthValue.YES,sarg.evaluate(values(TruthValue.NO,TruthValue.YES,TruthValue.YES)));
assertEquals(TruthValue.NULL,sarg.evaluate(values(TruthValue.NULL,TruthValue.YES,TruthValue.YES)));
assertEquals(TruthValue.NULL,sarg.evaluate(values(TruthValue.NO,TruthValue.NULL,TruthValue.YES)));
assertEquals(TruthValue.NO,sarg.evaluate(values(TruthValue.YES,TruthValue.YES,TruthValue.YES)));
assertEquals(TruthValue.NO,sarg.evaluate(values(TruthValue.NO,TruthValue.YES,TruthValue.NO)));
assertEquals(TruthValue.NO,sarg.evaluate(values(TruthValue.NO,TruthValue.YES_NULL,TruthValue.NO)));
assertEquals(TruthValue.NO_NULL,sarg.evaluate(values(TruthValue.NO,TruthValue.NULL,TruthValue.YES_NO_NULL)));
assertEquals(TruthValue.NO_NULL,sarg.evaluate(values(TruthValue.NO,TruthValue.YES,TruthValue.NO_NULL)));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Converts a serialized AND of two boolean-column equality predicates into a
// SearchArgument: two BOOLEAN leaves, "(EQUALS b1 true)" and "(EQUALS b2 true)".
@Test public void TestBooleanSarg() throws Exception {
// Base64 blob: a pre-serialized ExprNodeDesc AST captured from a query plan.
String serialAst="AQEAamF2YS51dGlsLkFycmF5TGlz9AECAQFvcmcuYXBhY2hlLmhhZG9vcC5oaXZlLnFsLnBsYW4uRXh" + "wck5vZGVHZW5lcmljRnVuY0Rlc+MBAQABAgECb3JnLmFwYWNoZS5oYWRvb3AuaGl2ZS5xbC5wbGFuLk" + "V4cHJOb2RlQ29sdW1uRGVz4wEBYrEAAAFib29sb3LjAQNvcmcuYXBhY2hlLmhhZG9vcC5oaXZlLnNlc"+ "mRlMi50eXBlaW5mby5QcmltaXRpdmVUeXBlSW5m7wEBYm9vbGVh7gEEb3JnLmFwYWNoZS5oYWRvb3Au"+ "aGl2ZS5xbC5wbGFuLkV4cHJOb2RlQ29uc3RhbnREZXPjAQEDCQUBAQVvcmcuYXBhY2hlLmhhZG9vcC5"+ "oaXZlLnFsLnVkZi5nZW5lcmljLkdlbmVyaWNVREZPUEVxdWHsAQAAAYI9AUVRVUHMAQZvcmcuYXBhY2"+ "hlLmhhZG9vcC5pby5Cb29sZWFuV3JpdGFibOUBAAABAwkBAgEBYrIAAAgBAwkBB29yZy5hcGFjaGUua"+ "GFkb29wLmhpdmUucWwudWRmLmdlbmVyaWMuR2VuZXJpY1VERk9QQW7kAQEGAQAAAQMJ";
SearchArgument sarg=new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst)).buildSearchArgument();
// Top-level expression is a conjunction of the two leaves.
assertEquals("(and leaf-0 leaf-1)",sarg.getExpression().toString());
assertEquals(2,sarg.getLeaves().size());
PredicateLeaf leaf=sarg.getLeaves().get(0);
assertEquals(PredicateLeaf.Type.BOOLEAN,leaf.getType());
assertEquals("(EQUALS b1 true)",leaf.toString());
leaf=sarg.getLeaves().get(1);
assertEquals(PredicateLeaf.Type.BOOLEAN,leaf.getType());
assertEquals("(EQUALS b2 true)",leaf.toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Converts a serialized equality predicate on a date column into a SearchArgument
// and verifies the single leaf is DATE-typed — per "(EQUALS dt 2015-05-05)".
@Test public void TestDateSarg() throws Exception {
// Base64 blob: a pre-serialized ExprNodeDesc AST captured from a query plan.
String serialAst="AQEAamF2YS51dGlsLkFycmF5TGlz9AECAQFvcmcuYXBhY2hlLmhhZG9vcC5oaXZlLnFsLnBsYW4uRXh" + "wck5vZGVDb2x1bW5EZXPjAQFk9AAAAWJpZ29y4wECb3JnLmFwYWNoZS5oYWRvb3AuaGl2ZS5zZXJkZT" + "IudHlwZWluZm8uUHJpbWl0aXZlVHlwZUluZu8BAWRhdOUBA29yZy5hcGFjaGUuaGFkb29wLmhpdmUuc"+ "WwucGxhbi5FeHByTm9kZUNvbnN0YW50RGVz4wEBAgEBc3RyaW7nAwEyMDE1LTA1LTC1AQRvcmcuYXBh"+ "Y2hlLmhhZG9vcC5oaXZlLnFsLnVkZi5nZW5lcmljLkdlbmVyaWNVREZPUEVxdWHsAQAAAYI9AUVRVUH"+ "MAQVvcmcuYXBhY2hlLmhhZG9vcC5pby5Cb29sZWFuV3JpdGFibOUBAAABAgEBYm9vbGVh7g==";
SearchArgument sarg=new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst)).buildSearchArgument();
assertEquals("leaf-0",sarg.getExpression().toString());
assertEquals(1,sarg.getLeaves().size());
PredicateLeaf leaf=sarg.getLeaves().get(0);
assertEquals(PredicateLeaf.Type.DATE,leaf.getType());
assertEquals("(EQUALS dt 2015-05-05)",leaf.toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Converts a serialized equality predicate on a char column into a SearchArgument;
// char maps to a STRING leaf. The expected literal "char " retains trailing padding.
@Test public void TestCharSarg() throws Exception {
// Base64 blob: a pre-serialized ExprNodeDesc AST captured from a query plan.
String serialAst="AQEAamF2YS51dGlsLkFycmF5TGlz9AECAQFvcmcuYXBhY2hlLmhhZG9vcC5oaXZlLnFsLnBsYW4uRXh" + "wck5vZGVDb2x1bW5EZXPjAQFj6AAAAWJpZ29y4wECb3JnLmFwYWNoZS5oYWRvb3AuaGl2ZS5zZXJkZT" + "IudHlwZWluZm8uQ2hhclR5cGVJbmbvARQBY2hh8gEDb3JnLmFwYWNoZS5oYWRvb3AuaGl2ZS5xbC5wb"+ "GFuLkV4cHJOb2RlQ29uc3RhbnREZXPjAQEEb3JnLmFwYWNoZS5oYWRvb3AuaGl2ZS5zZXJkZTIudHlw"+ "ZWluZm8uUHJpbWl0aXZlVHlwZUluZu8BAXN0cmlu5wMBY2hhciAgICAgoAEFb3JnLmFwYWNoZS5oYWR"+ "vb3AuaGl2ZS5xbC51ZGYuZ2VuZXJpYy5HZW5lcmljVURGT1BFcXVh7AEAAAGCPQFFUVVBzAEGb3JnLm"+ "FwYWNoZS5oYWRvb3AuaW8uQm9vbGVhbldyaXRhYmzlAQAAAQQBAWJvb2xlYe4=";
SearchArgument sarg=new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst)).buildSearchArgument();
assertEquals("leaf-0",sarg.getExpression().toString());
assertEquals(1,sarg.getLeaves().size());
PredicateLeaf leaf=sarg.getLeaves().get(0);
assertEquals(PredicateLeaf.Type.STRING,leaf.getType());
assertEquals("(EQUALS ch char )",leaf.toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Converts a serialized equality predicate on a decimal column into a SearchArgument
// and verifies the single leaf is DECIMAL-typed — per "(EQUALS dec 123)".
@Test public void TestDecimalSarg() throws Exception {
// Base64 blob: a pre-serialized ExprNodeDesc AST captured from a query plan.
String serialAst="AQEAamF2YS51dGlsLkFycmF5TGlz9AECAQFvcmcuYXBhY2hlLmhhZG9vcC5oaXZlLnFsLnBsYW4uRXh" + "wck5vZGVDb2x1bW5EZXPjAQFkZeMAAAFiaWdvcuMBAm9yZy5hcGFjaGUuaGFkb29wLmhpdmUuc2VyZG" + "UyLnR5cGVpbmZvLkRlY2ltYWxUeXBlSW5m7wEUAAFkZWNpbWHsAQNvcmcuYXBhY2hlLmhhZG9vcC5oa"+ "XZlLnFsLnBsYW4uRXhwck5vZGVDb25zdGFudERlc+MBAQRvcmcuYXBhY2hlLmhhZG9vcC5oaXZlLnNl"+ "cmRlMi50eXBlaW5mby5QcmltaXRpdmVUeXBlSW5m7wEBaW70AvYBAQVvcmcuYXBhY2hlLmhhZG9vcC5"+ "oaXZlLnFsLnVkZi5nZW5lcmljLkdlbmVyaWNVREZPUEVxdWHsAQAAAYI9AUVRVUHMAQZvcmcuYXBhY2"+ "hlLmhhZG9vcC5pby5Cb29sZWFuV3JpdGFibOUBAAABBAEBYm9vbGVh7g==";
SearchArgument sarg=new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst)).buildSearchArgument();
assertEquals("leaf-0",sarg.getExpression().toString());
assertEquals(1,sarg.getLeaves().size());
PredicateLeaf leaf=sarg.getLeaves().get(0);
assertEquals(PredicateLeaf.Type.DECIMAL,leaf.getType());
assertEquals("(EQUALS dec 123)",leaf.toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Converts a serialized equality predicate on a varchar column into a SearchArgument;
// varchar maps to a STRING leaf — per "(EQUALS vc variable)".
@Test public void TestVarcharSarg() throws Exception {
// Base64 blob: a pre-serialized ExprNodeDesc AST captured from a query plan.
String serialAst="AQEAamF2YS51dGlsLkFycmF5TGlz9AECAQFvcmcuYXBhY2hlLmhhZG9vcC5oaXZlLnFsLnBsYW4uRXh" + "wck5vZGVDb2x1bW5EZXPjAQF24wAAAWJpZ29y4wECb3JnLmFwYWNoZS5oYWRvb3AuaGl2ZS5zZXJkZT" + "IudHlwZWluZm8uVmFyY2hhclR5cGVJbmbvAcgBAXZhcmNoYfIBA29yZy5hcGFjaGUuaGFkb29wLmhpd"+ "mUucWwucGxhbi5FeHByTm9kZUNvbnN0YW50RGVz4wEBBG9yZy5hcGFjaGUuaGFkb29wLmhpdmUuc2Vy"+ "ZGUyLnR5cGVpbmZvLlByaW1pdGl2ZVR5cGVJbmbvAQFzdHJpbucDAXZhcmlhYmzlAQVvcmcuYXBhY2h"+ "lLmhhZG9vcC5oaXZlLnFsLnVkZi5nZW5lcmljLkdlbmVyaWNVREZPUEVxdWHsAQAAAYI9AUVRVUHMAQ"+ "ZvcmcuYXBhY2hlLmhhZG9vcC5pby5Cb29sZWFuV3JpdGFibOUBAAABBAEBYm9vbGVh7g==";
SearchArgument sarg=new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst)).buildSearchArgument();
assertEquals("leaf-0",sarg.getExpression().toString());
assertEquals(1,sarg.getLeaves().size());
PredicateLeaf leaf=sarg.getLeaves().get(0);
assertEquals(PredicateLeaf.Type.STRING,leaf.getType());
assertEquals("(EQUALS vc variable)",leaf.toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Converts a predicate that collapses to a BETWEEN on first_name (Parquet form:
// lt 'greg' AND NOT lteq 'david') and checks the single STRING leaf's metadata.
// NOTE(review): exprStr appears to be an XML-serialized expression whose markup was
// stripped during extraction; verify against the original test source before editing.
@Test public void testExpression5() throws Exception {
String exprStr=" \n" + " \n" + " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " first_name \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " string \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " owen \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " boolean \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " foobar \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " last_name \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " int \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 4 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " org.apache.hadoop.hive.ql.udf.UDFSubstr \n"+ " \n"+ " \n"+ " substr \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " false \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " first_name \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " david \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " greg \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n";
SearchArgumentImpl sarg=(SearchArgumentImpl)ConvertAstToSearchArg.create(getFuncDesc(exprStr));
List leaves=sarg.getLeaves();
assertEquals(1,leaves.size());
// Translate to a Parquet filter over a minimal two-column schema.
MessageType schema=MessageTypeParser.parseMessageType("message test { required int32 id;" + " required binary first_name; }");
FilterPredicate p=ParquetFilterPredicateConverter.toFilterPredicate(sarg,schema);
String expected="and(lt(first_name, Binary{\"greg\"}), not(lteq(first_name, Binary{\"david\"})))";
// NOTE(review): arguments are swapped — JUnit convention is assertEquals(expected, actual).
// Pass/fail is unaffected, but a failure message would label the values backwards.
assertEquals(p.toString(),expected);
assertEquals(PredicateLeaf.Type.STRING,leaves.get(0).getType());
assertEquals(PredicateLeaf.Operator.BETWEEN,leaves.get(0).getOperator());
assertEquals("first_name",leaves.get(0).getColumnName());
assertEquals("leaf-0",sarg.getExpression().toString());
// Expression nodes must not be aliased (identity-based check).
assertNoSharedNodes(sarg.getExpression(),Sets.newIdentityHashSet());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Converts a serialized equality predicate on a timestamp column into a SearchArgument
// and verifies the single leaf is TIMESTAMP-typed — per "(EQUALS ts 2015-03-17 12:34:56.0)".
@Test public void TestTimestampSarg() throws Exception {
// Base64 blob: a pre-serialized ExprNodeDesc AST captured from a query plan.
String serialAst="AQEAamF2YS51dGlsLkFycmF5TGlz9AECAQFvcmcuYXBhY2hlLmhhZG9vcC5oaXZlLnFsLn" + "BsYW4uRXhwck5vZGVDb2x1bW5EZXPjAQF08wAAAWJpZ29y4wECb3JnLmFwYWNoZS5o" + "YWRvb3AuaGl2ZS5zZXJkZTIudHlwZWluZm8uUHJpbWl0aXZlVHlwZUluZu8BAXRpbW"+ "VzdGFt8AEDb3JnLmFwYWNoZS5oYWRvb3AuaGl2ZS5xbC5wbGFuLkV4cHJOb2RlQ29u"+ "c3RhbnREZXPjAQECAQFzdHJpbucDATIwMTUtMDMtMTcgMTI6MzQ6NbYBBG9yZy5hcG"+ "FjaGUuaGFkb29wLmhpdmUucWwudWRmLmdlbmVyaWMuR2VuZXJpY1VERk9QRXF1YewB"+ "AAABgj0BRVFVQcwBBW9yZy5hcGFjaGUuaGFkb29wLmlvLkJvb2xlYW5Xcml0YWJs5Q"+ "EAAAECAQFib29sZWHu";
SearchArgument sarg=new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst)).buildSearchArgument();
assertEquals("leaf-0",sarg.getExpression().toString());
assertEquals(1,sarg.getLeaves().size());
PredicateLeaf leaf=sarg.getLeaves().get(0);
assertEquals(PredicateLeaf.Type.TIMESTAMP,leaf.getType());
assertEquals("(EQUALS ts 2015-03-17 12:34:56.0)",leaf.toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Converts an OR of four predicates — first_name IS NULL, NOT(first_name='sue'),
// NOT(id<12), id<=4 — per "(or leaf-0 (not leaf-1) (not leaf-2) leaf-3)" — and checks
// the Parquet translation, per-leaf metadata, and TruthValue evaluation of the disjunction.
// NOTE(review): exprStr appears to be an XML-serialized expression whose markup was
// stripped during extraction; verify against the original test source before editing.
@Test public void testExpression2() throws Exception {
String exprStr=" \n" + " \n" + " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " first_name \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " string \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " boolean \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " first_name \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " sue \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " int \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 12 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 4 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n";
SearchArgumentImpl sarg=(SearchArgumentImpl)ConvertAstToSearchArg.create(getFuncDesc(exprStr));
List leaves=sarg.getLeaves();
assertEquals(4,leaves.size());
// The four disjuncts as they appear in the Parquet predicate, in order.
String[] conditions=new String[]{"eq(first_name, null)","not(eq(first_name, Binary{\"sue\"}))","not(lt(id, 12))","lteq(id, 4)"};
MessageType schema=MessageTypeParser.parseMessageType("message test { required int32 id;" + " required binary first_name; }");
FilterPredicate p=ParquetFilterPredicateConverter.toFilterPredicate(sarg,schema);
String expected=String.format("or(or(or(%1$s, %2$s), %3$s), %4$s)",conditions);
assertEquals(expected,p.toString());
// leaf-0: IS_NULL on first_name — carries no literal or literal list.
PredicateLeaf leaf=leaves.get(0);
assertEquals(PredicateLeaf.Type.STRING,leaf.getType());
assertEquals(PredicateLeaf.Operator.IS_NULL,leaf.getOperator());
assertEquals("first_name",leaf.getColumnName());
assertEquals(null,leaf.getLiteral());
assertEquals(null,leaf.getLiteralList());
// leaf-1: EQUALS on first_name='sue' (negated at the expression level).
leaf=leaves.get(1);
assertEquals(PredicateLeaf.Type.STRING,leaf.getType());
assertEquals(PredicateLeaf.Operator.EQUALS,leaf.getOperator());
assertEquals("first_name",leaf.getColumnName());
assertEquals("sue",leaf.getLiteral());
// leaf-2: LESS_THAN on id<12 (negated at the expression level).
leaf=leaves.get(2);
assertEquals(PredicateLeaf.Type.LONG,leaf.getType());
assertEquals(PredicateLeaf.Operator.LESS_THAN,leaf.getOperator());
assertEquals("id",leaf.getColumnName());
assertEquals(12L,leaf.getLiteral());
// leaf-3: LESS_THAN_EQUALS on id<=4.
leaf=leaves.get(3);
assertEquals(PredicateLeaf.Type.LONG,leaf.getType());
assertEquals(PredicateLeaf.Operator.LESS_THAN_EQUALS,leaf.getOperator());
assertEquals("id",leaf.getColumnName());
assertEquals(4L,leaf.getLiteral());
assertEquals("(or leaf-0 (not leaf-1) (not leaf-2) leaf-3)",sarg.getExpression().toString());
// Expression nodes must not be aliased (identity-based check).
assertNoSharedNodes(sarg.getExpression(),Sets.newIdentityHashSet());
// Three-valued-logic evaluation of l0 OR (NOT l1) OR (NOT l2) OR l3 across input combinations.
assertEquals(TruthValue.NO,sarg.evaluate(values(TruthValue.NO,TruthValue.YES,TruthValue.YES,TruthValue.NO)));
assertEquals(TruthValue.YES,sarg.evaluate(values(TruthValue.YES,TruthValue.YES,TruthValue.YES,TruthValue.NO)));
assertEquals(TruthValue.YES,sarg.evaluate(values(TruthValue.NO,TruthValue.NO,TruthValue.YES,TruthValue.NO)));
assertEquals(TruthValue.YES,sarg.evaluate(values(TruthValue.NO,TruthValue.YES,TruthValue.NO,TruthValue.NO)));
assertEquals(TruthValue.YES,sarg.evaluate(values(TruthValue.NO,TruthValue.YES,TruthValue.YES,TruthValue.YES)));
assertEquals(TruthValue.NULL,sarg.evaluate(values(TruthValue.NULL,TruthValue.YES,TruthValue.YES,TruthValue.NO)));
assertEquals(TruthValue.NULL,sarg.evaluate(values(TruthValue.NO,TruthValue.NULL,TruthValue.YES,TruthValue.NO)));
assertEquals(TruthValue.NULL,sarg.evaluate(values(TruthValue.NO,TruthValue.YES,TruthValue.NULL,TruthValue.NO)));
assertEquals(TruthValue.NULL,sarg.evaluate(values(TruthValue.NO,TruthValue.YES,TruthValue.YES,TruthValue.NULL)));
assertEquals(TruthValue.YES_NO,sarg.evaluate(values(TruthValue.NO,TruthValue.YES_NO,TruthValue.YES,TruthValue.YES_NO)));
assertEquals(TruthValue.NO_NULL,sarg.evaluate(values(TruthValue.NO,TruthValue.YES_NULL,TruthValue.YES,TruthValue.NO_NULL)));
assertEquals(TruthValue.YES_NULL,sarg.evaluate(values(TruthValue.YES_NULL,TruthValue.YES_NO_NULL,TruthValue.YES,TruthValue.NULL)));
assertEquals(TruthValue.YES_NO_NULL,sarg.evaluate(values(TruthValue.NO_NULL,TruthValue.YES_NO_NULL,TruthValue.YES,TruthValue.NO)));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Converts a serialized Hive expression AST into a SearchArgument and verifies that
// the pair (id > 23 AND id < 45) is normalized into a single BETWEEN leaf, alongside
// two string-equality leaves, and that the Parquet conversion re-expands BETWEEN
// into two range predicates.
// NOTE(review): exprStr appears to be an ExprNodeDesc XML serialization whose markup
// was stripped during extraction, leaving only text nodes; getFuncDesc presumably
// rebuilds the descriptor from it — confirm against the original test resource.
@Test public void testExpression3() throws Exception {
String exprStr=" \n" + " \n" + " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " boolean \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " false \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " int \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 23 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 45 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " first_name \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " string \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " alan \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " xxxxx \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 3 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " org.apache.hadoop.hive.ql.udf.UDFSubstr \n"+ " \n"+ " \n"+ " substr \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " first_name \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " smith \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " last_name \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ 
" \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " first_name \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 3 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " org.apache.hadoop.hive.ql.udf.UDFSubstr \n"+ " \n"+ " \n"+ " substr \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " yyy \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n";
SearchArgumentImpl sarg=(SearchArgumentImpl)ConvertAstToSearchArg.create(getFuncDesc(exprStr));
List leaves=sarg.getLeaves();
// Three leaves only: the two id comparisons collapse into one BETWEEN leaf.
assertEquals(3,leaves.size());
// Parquet cannot express BETWEEN directly, so four conditions appear after conversion.
String[] conditions=new String[]{"lt(id, 45)","not(lteq(id, 23))","eq(first_name, Binary{\"alan\"})","eq(last_name, Binary{\"smith\"})"};
MessageType schema=MessageTypeParser.parseMessageType("message test { required int32 id;" + " required binary first_name; required binary last_name;}");
FilterPredicate p=ParquetFilterPredicateConverter.toFilterPredicate(sarg,schema);
String expected=String.format("and(and(and(%1$s, %2$s), %3$s), %4$s)",conditions);
assertEquals(expected,p.toString());
// Leaf 0: the synthesized BETWEEN(id, 23, 45) — literal list, no single literal.
PredicateLeaf leaf=leaves.get(0);
assertEquals(PredicateLeaf.Type.LONG,leaf.getType());
assertEquals(PredicateLeaf.Operator.BETWEEN,leaf.getOperator());
assertEquals("id",leaf.getColumnName());
assertEquals(null,leaf.getLiteral());
assertEquals(23L,leaf.getLiteralList().get(0));
assertEquals(45L,leaf.getLiteralList().get(1));
leaf=leaves.get(1);
assertEquals(PredicateLeaf.Type.STRING,leaf.getType());
assertEquals(PredicateLeaf.Operator.EQUALS,leaf.getOperator());
assertEquals("first_name",leaf.getColumnName());
assertEquals("alan",leaf.getLiteral());
leaf=leaves.get(2);
assertEquals(PredicateLeaf.Type.STRING,leaf.getType());
assertEquals(PredicateLeaf.Operator.EQUALS,leaf.getOperator());
assertEquals("last_name",leaf.getColumnName());
assertEquals("smith",leaf.getLiteral());
assertEquals("(and leaf-0 leaf-1 leaf-2)",sarg.getExpression().toString());
// The expression tree must not share subtree instances (identity check).
assertNoSharedNodes(sarg.getExpression(),Sets.newIdentityHashSet());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// Expression over first_name/last_name only: nothing is convertible into a predicate
// leaf, so the SARG folds to the constant YES_NO_NULL and the Parquet converter
// returns null (no pushdown possible).
// NOTE(review): exprStr appears to be an ExprNodeDesc serialization with markup
// stripped by the extractor; getFuncDesc presumably rebuilds it — confirm.
@Test public void testExpression8() throws Exception {
String exprStr=" \n" + " \n" + " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " first_name \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " string \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " last_name \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " boolean \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " ";
SearchArgumentImpl sarg=(SearchArgumentImpl)ConvertAstToSearchArg.create(getFuncDesc(exprStr));
List leaves=sarg.getLeaves();
// No predicate leaves survive conversion.
assertEquals(0,leaves.size());
MessageType schema=MessageTypeParser.parseMessageType("message test { required int32 id;" + " required binary first_name; }");
FilterPredicate p=ParquetFilterPredicateConverter.toFilterPredicate(sarg,schema);
// With no leaves there is nothing to push to Parquet.
assertNull(p);
assertEquals("YES_NO_NULL",sarg.getExpression().toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Expression built from UDFOPPlus arithmetic on the id column: arithmetic sub-trees
// are not convertible, so the SARG degrades to the constant YES_NO_NULL and
// evaluation with no leaf values returns YES_NO_NULL.
// NOTE(review): exprStr appears to be a markup-stripped ExprNodeDesc serialization;
// getFuncDesc presumably rebuilds the descriptor — confirm.
@Test public void testExpression9() throws Exception {
String exprStr=" \n" + " \n" + " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " int \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 1 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 3 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " true \n"+ " \n"+ " \n"+ " org.apache.hadoop.hive.ql.udf.UDFOPPlus \n"+ " \n"+ " \n"+ " + \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 4 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " true \n"+ " \n"+ " \n"+ " org.apache.hadoop.hive.ql.udf.UDFOPPlus \n"+ " \n"+ " \n"+ " + \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " boolean \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " ";
SearchArgumentImpl sarg=(SearchArgumentImpl)ConvertAstToSearchArg.create(getFuncDesc(exprStr));
List leaves=sarg.getLeaves();
assertEquals(0,leaves.size());
assertEquals("YES_NO_NULL",sarg.getExpression().toString());
assertEquals(TruthValue.YES_NO_NULL,sarg.evaluate(values()));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Converts a larger serialized AST into a SARG with nine leaves mixing string and
// long comparisons (including null-safe equals), and checks both the Parquet
// conversion and the CNF-shaped expression tree.
// NOTE(review): exprStr appears to be a markup-stripped ExprNodeDesc serialization;
// getFuncDesc presumably rebuilds the descriptor — confirm.
@Test public void testExpression1() throws Exception {
String exprStr=" \n" + " \n" + " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " first_name \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " string \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " john \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " boolean \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " greg \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " first_name \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " alan \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " first_name \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " int \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 12 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 13 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ 
" \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 15 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 16 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " id \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " 30 \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " first_name \n"+ " \n"+ " \n"+ " orc_people \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " owen \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n"+ " \n";
SearchArgumentImpl sarg=(SearchArgumentImpl)ConvertAstToSearchArg.create(getFuncDesc(exprStr));
List leaves=sarg.getLeaves();
assertEquals(9,leaves.size());
MessageType schema=MessageTypeParser.parseMessageType("message test { required int32 id;" + " required binary first_name; }");
FilterPredicate p=ParquetFilterPredicateConverter.toFilterPredicate(sarg,schema);
String[] conditions=new String[]{"eq(first_name, Binary{\"john\"})","not(lteq(first_name, Binary{\"greg\"}))","lt(first_name, Binary{\"alan\"})","not(lteq(id, 12))","not(lteq(id, 13))","lt(id, 15)","lt(id, 16)","eq(id, 30)","eq(first_name, Binary{\"owen\"})"};
// CNF produced two OR clauses sharing the first seven conditions, differing in the last.
String expected=String.format("and(or(or(or(or(or(or(or(%1$s, %2$s), %3$s), %4$s), %5$s), %6$s), %7$s), %8$s), " + "or(or(or(or(or(or(or(%1$s, %2$s), %3$s), %4$s), %5$s), %6$s), %7$s), %9$s))",conditions);
assertEquals(expected,p.toString());
// Leaf-by-leaf verification of type, operator, column and literal.
PredicateLeaf leaf=leaves.get(0);
assertEquals(PredicateLeaf.Type.STRING,leaf.getType());
assertEquals(PredicateLeaf.Operator.EQUALS,leaf.getOperator());
assertEquals("first_name",leaf.getColumnName());
assertEquals("john",leaf.getLiteral());
leaf=leaves.get(1);
assertEquals(PredicateLeaf.Type.STRING,leaf.getType());
assertEquals(PredicateLeaf.Operator.LESS_THAN_EQUALS,leaf.getOperator());
assertEquals("first_name",leaf.getColumnName());
assertEquals("greg",leaf.getLiteral());
leaf=leaves.get(2);
assertEquals(PredicateLeaf.Type.STRING,leaf.getType());
assertEquals(PredicateLeaf.Operator.LESS_THAN,leaf.getOperator());
assertEquals("first_name",leaf.getColumnName());
assertEquals("alan",leaf.getLiteral());
leaf=leaves.get(3);
assertEquals(PredicateLeaf.Type.LONG,leaf.getType());
assertEquals(PredicateLeaf.Operator.LESS_THAN_EQUALS,leaf.getOperator());
assertEquals("id",leaf.getColumnName());
assertEquals(12L,leaf.getLiteral());
leaf=leaves.get(4);
assertEquals(PredicateLeaf.Type.LONG,leaf.getType());
assertEquals(PredicateLeaf.Operator.LESS_THAN_EQUALS,leaf.getOperator());
assertEquals("id",leaf.getColumnName());
assertEquals(13L,leaf.getLiteral());
leaf=leaves.get(5);
assertEquals(PredicateLeaf.Type.LONG,leaf.getType());
assertEquals(PredicateLeaf.Operator.LESS_THAN,leaf.getOperator());
assertEquals("id",leaf.getColumnName());
assertEquals(15L,leaf.getLiteral());
leaf=leaves.get(6);
assertEquals(PredicateLeaf.Type.LONG,leaf.getType());
assertEquals(PredicateLeaf.Operator.LESS_THAN,leaf.getOperator());
assertEquals("id",leaf.getColumnName());
assertEquals(16L,leaf.getLiteral());
leaf=leaves.get(7);
assertEquals(PredicateLeaf.Type.LONG,leaf.getType());
assertEquals(PredicateLeaf.Operator.NULL_SAFE_EQUALS,leaf.getOperator());
assertEquals("id",leaf.getColumnName());
assertEquals(30L,leaf.getLiteral());
leaf=leaves.get(8);
assertEquals(PredicateLeaf.Type.STRING,leaf.getType());
assertEquals(PredicateLeaf.Operator.NULL_SAFE_EQUALS,leaf.getOperator());
assertEquals("first_name",leaf.getColumnName());
assertEquals("owen",leaf.getLiteral());
assertEquals("(and (or leaf-0 (not leaf-1) leaf-2 (not leaf-3)" + " (not leaf-4) leaf-5 leaf-6 leaf-7)" + " (or leaf-0 (not leaf-1) leaf-2 (not leaf-3)"+ " (not leaf-4) leaf-5 leaf-6 leaf-8))",sarg.getExpression().toString());
// No subtree instance may be shared between the two OR clauses (identity check).
assertNoSharedNodes(sarg.getExpression(),Sets.newIdentityHashSet());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Deserializes a Kryo/base64-encoded expression for (flt = 1.1) and checks it
// produces a single FLOAT EQUALS leaf.
// NOTE(review): method name violates lowerCamelCase ("testFloatSarg" would be
// conventional); left unchanged in this documentation-only pass.
@Test public void TestFloatSarg() throws Exception {
// Opaque fixture: base64 of a Kryo-serialized ExprNodeDesc tree (do not edit by hand).
String serialAst="AQEAamF2YS51dGlsLkFycmF5TGlz9AECAQFvcmcuYXBhY2hlLmhhZG9vcC5oaXZlLnFsLnBsYW4uRXh" + "wck5vZGVDb2x1bW5EZXPjAQFmbPQAAAFiaWdvcuMBAm9yZy5hcGFjaGUuaGFkb29wLmhpdmUuc2VyZG" + "UyLnR5cGVpbmZvLlByaW1pdGl2ZVR5cGVJbmbvAQFmbG9h9AEDb3JnLmFwYWNoZS5oYWRvb3AuaGl2Z"+ "S5xbC5wbGFuLkV4cHJOb2RlQ29uc3RhbnREZXPjAQECBwQ/jMzNAQRvcmcuYXBhY2hlLmhhZG9vcC5o"+ "aXZlLnFsLnVkZi5nZW5lcmljLkdlbmVyaWNVREZPUEVxdWHsAQAAAYI9AUVRVUHMAQVvcmcuYXBhY2h"+ "lLmhhZG9vcC5pby5Cb29sZWFuV3JpdGFibOUBAAABAgEBYm9vbGVh7g==";
SearchArgument sarg=new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst)).buildSearchArgument();
assertEquals("leaf-0",sarg.getExpression().toString());
assertEquals(1,sarg.getLeaves().size());
PredicateLeaf leaf=sarg.getLeaves().get(0);
assertEquals(PredicateLeaf.Type.FLOAT,leaf.getType());
assertEquals("(EQUALS flt 1.1)",leaf.toString());
}
Class: org.apache.hadoop.hive.ql.io.sarg.TestSearchArgumentImpl EqualityVerifier
// Exercises the SARG builder with complex literal types (DATE, CHAR-padded STRING,
// DECIMAL), then with NOT over an OR of IS_NULL / decimal BETWEEN / IN /
// NULL_SAFE_EQUALS, checking the normalized textual form in both cases.
@Test public void testBuilderComplexTypes2() throws Exception {
  SearchArgument conjunction = SearchArgumentFactory.newBuilder()
      .startAnd()
      .lessThan("x", PredicateLeaf.Type.DATE, Date.valueOf("2005-3-12"))
      .lessThanEquals("y", PredicateLeaf.Type.STRING, new HiveChar("hi", 10).toString())
      .equals("z", PredicateLeaf.Type.DECIMAL, new HiveDecimalWritable("1.0"))
      .end()
      .build();
  assertEquals("leaf-0 = (LESS_THAN x 2005-03-12), "
      + "leaf-1 = (LESS_THAN_EQUALS y hi ), "
      + "leaf-2 = (EQUALS z 1), "
      + "expr = (and leaf-0 leaf-1 leaf-2)", conjunction.toString());
  // NOT over OR is rewritten into an AND of negated leaves.
  SearchArgument negatedDisjunction = SearchArgumentFactory.newBuilder()
      .startNot()
      .startOr()
      .isNull("x", PredicateLeaf.Type.LONG)
      .between("y", PredicateLeaf.Type.DECIMAL, new HiveDecimalWritable("10"),
          new HiveDecimalWritable("20.0"))
      .in("z", PredicateLeaf.Type.LONG, 1L, 2L, 3L)
      .nullSafeEquals("a", PredicateLeaf.Type.STRING, new HiveVarchar("stinger", 100).toString())
      .end()
      .end()
      .build();
  assertEquals("leaf-0 = (IS_NULL x), "
      + "leaf-1 = (BETWEEN y 10 20), "
      + "leaf-2 = (IN z 1 2 3), "
      + "leaf-3 = (NULL_SAFE_EQUALS a stinger), "
      + "expr = (and (not leaf-0) (not leaf-1) (not leaf-2) (not leaf-3))",
      negatedDisjunction.toString());
}
EqualityVerifier
// Verifies BuilderImpl.pushDownNot: double negation elimination, De Morgan rewriting
// of NOT over AND/OR, and negation of constant truth values.
@Test public void testNotPushdown() throws Exception {
  // A bare leaf and a single NOT pass through unchanged.
  assertEquals("leaf-1", SearchArgumentImpl.BuilderImpl.pushDownNot(leaf(1)).toString());
  assertEquals("(not leaf-1)", SearchArgumentImpl.BuilderImpl.pushDownNot(not(leaf(1))).toString());
  // Even stacks of NOT cancel out; odd stacks reduce to one NOT.
  assertEquals("leaf-1", SearchArgumentImpl.BuilderImpl.pushDownNot(not(not(leaf(1)))).toString());
  assertEquals("(not leaf-1)", SearchArgumentImpl.BuilderImpl.pushDownNot(not(not(not(leaf(1))))).toString());
  // De Morgan: NOT over AND/OR flips the connective and negates each operand.
  assertEquals("(or leaf-1 (not leaf-2))",
      SearchArgumentImpl.BuilderImpl.pushDownNot(not(and(not(leaf(1)), leaf(2)))).toString());
  assertEquals("(and (not leaf-1) leaf-2)",
      SearchArgumentImpl.BuilderImpl.pushDownNot(not(or(leaf(1), not(leaf(2))))).toString());
  assertEquals("(or (or (not leaf-1) leaf-2) leaf-3)",
      SearchArgumentImpl.BuilderImpl.pushDownNot(
          or(not(and(leaf(1), not(leaf(2)))), not(not(leaf(3))))).toString());
  // Negating a constant yields its logical complement (YES_NO and YES_NO_NULL are
  // self-complementary).
  TruthValue[] inputs = {TruthValue.YES, TruthValue.NO, TruthValue.NULL,
      TruthValue.YES_NO, TruthValue.NO_NULL, TruthValue.YES_NULL, TruthValue.YES_NO_NULL};
  String[] complements = {"NO", "YES", "NULL", "YES_NO", "YES_NULL", "NO_NULL", "YES_NO_NULL"};
  for (int i = 0; i < inputs.length; i++) {
    assertEquals(complements[i],
        SearchArgumentImpl.BuilderImpl.pushDownNot(not(constant(inputs[i]))).toString());
  }
}
EqualityVerifier
// Basic builder round-trip: a simple AND of three leaves, then NOT over an OR of
// four leaves, verifying the canonical string form of the resulting SARGs.
@Test public void testBuilder() throws Exception {
  SearchArgument conjunction = SearchArgumentFactory.newBuilder()
      .startAnd()
      .lessThan("x", PredicateLeaf.Type.LONG, 10L)
      .lessThanEquals("y", PredicateLeaf.Type.STRING, "hi")
      .equals("z", PredicateLeaf.Type.FLOAT, 1.0)
      .end()
      .build();
  assertEquals("leaf-0 = (LESS_THAN x 10), "
      + "leaf-1 = (LESS_THAN_EQUALS y hi), "
      + "leaf-2 = (EQUALS z 1.0), "
      + "expr = (and leaf-0 leaf-1 leaf-2)", conjunction.toString());
  // NOT over OR is normalized into an AND of negated leaves.
  SearchArgument negatedDisjunction = SearchArgumentFactory.newBuilder()
      .startNot()
      .startOr()
      .isNull("x", PredicateLeaf.Type.LONG)
      .between("y", PredicateLeaf.Type.LONG, 10L, 20L)
      .in("z", PredicateLeaf.Type.LONG, 1L, 2L, 3L)
      .nullSafeEquals("a", PredicateLeaf.Type.STRING, "stinger")
      .end()
      .end()
      .build();
  assertEquals("leaf-0 = (IS_NULL x), "
      + "leaf-1 = (BETWEEN y 10 20), "
      + "leaf-2 = (IN z 1 2 3), "
      + "leaf-3 = (NULL_SAFE_EQUALS a stinger), "
      + "expr = (and (not leaf-0) (not leaf-1) (not leaf-2) (not leaf-3))",
      negatedDisjunction.toString());
}
EqualityVerifier
// Verifies BuilderImpl.flatten collapses nested AND-of-AND / OR-of-OR chains into a
// single n-ary node, while leaving leaves, constants, NOT nodes, and mixed AND/OR
// nesting structurally untouched. Code left byte-identical: the exact nesting of
// each constant expression is the point of the test.
@Test public void testFlatten() throws Exception {
// Leaves and constants pass through unchanged.
assertEquals("leaf-1",SearchArgumentImpl.BuilderImpl.flatten(leaf(1)).toString());
assertEquals("NO",SearchArgumentImpl.BuilderImpl.flatten(constant(TruthValue.NO)).toString());
// flatten does NOT simplify double negation — that is pushDownNot's job.
assertEquals("(not (not leaf-1))",SearchArgumentImpl.BuilderImpl.flatten(not(not(leaf(1)))).toString());
assertEquals("(and leaf-1 leaf-2)",SearchArgumentImpl.BuilderImpl.flatten(and(leaf(1),leaf(2))).toString());
// Mixed AND/OR nesting is preserved.
assertEquals("(and (or leaf-1 leaf-2) leaf-3)",SearchArgumentImpl.BuilderImpl.flatten(and(or(leaf(1),leaf(2)),leaf(3))).toString());
// Same-operator nesting collapses into one n-ary node regardless of association.
assertEquals("(and leaf-1 leaf-2 leaf-3 leaf-4)",SearchArgumentImpl.BuilderImpl.flatten(and(and(leaf(1),leaf(2)),and(leaf(3),leaf(4)))).toString());
assertEquals("(or leaf-1 leaf-2 leaf-3 leaf-4)",SearchArgumentImpl.BuilderImpl.flatten(or(leaf(1),or(leaf(2),or(leaf(3),leaf(4))))).toString());
assertEquals("(or leaf-1 leaf-2 leaf-3 leaf-4)",SearchArgumentImpl.BuilderImpl.flatten(or(or(or(leaf(1),leaf(2)),leaf(3)),leaf(4))).toString());
assertEquals("(or leaf-1 leaf-2 leaf-3 leaf-4 leaf-5 leaf-6)",SearchArgumentImpl.BuilderImpl.flatten(or(or(leaf(1),or(leaf(2),leaf(3))),or(or(leaf(4),leaf(5)),leaf(6)))).toString());
assertEquals("(and (not leaf-1) leaf-2 (not leaf-3) leaf-4 (not leaf-5) leaf-6)",SearchArgumentImpl.BuilderImpl.flatten(and(and(not(leaf(1)),and(leaf(2),not(leaf(3)))),and(and(leaf(4),not(leaf(5))),leaf(6)))).toString());
// Flattening also applies beneath a NOT.
assertEquals("(not (and leaf-1 leaf-2 leaf-3))",SearchArgumentImpl.BuilderImpl.flatten(not(and(leaf(1),and(leaf(2),leaf(3))))).toString());
}
EqualityVerifier
// Same shape as testBuilderComplexTypes2 but with an epoch-era DATE literal,
// confirming DATE normalization to ISO form in the SARG's string rendering.
@Test public void testBuilderComplexTypes() throws Exception {
  SearchArgument conjunction = SearchArgumentFactory.newBuilder()
      .startAnd()
      .lessThan("x", PredicateLeaf.Type.DATE, Date.valueOf("1970-1-11"))
      .lessThanEquals("y", PredicateLeaf.Type.STRING, new HiveChar("hi", 10).toString())
      .equals("z", PredicateLeaf.Type.DECIMAL, new HiveDecimalWritable("1.0"))
      .end()
      .build();
  assertEquals("leaf-0 = (LESS_THAN x 1970-01-11), "
      + "leaf-1 = (LESS_THAN_EQUALS y hi ), "
      + "leaf-2 = (EQUALS z 1), "
      + "expr = (and leaf-0 leaf-1 leaf-2)", conjunction.toString());
  // NOT over OR is normalized into an AND of negated leaves.
  SearchArgument negatedDisjunction = SearchArgumentFactory.newBuilder()
      .startNot()
      .startOr()
      .isNull("x", PredicateLeaf.Type.LONG)
      .between("y", PredicateLeaf.Type.DECIMAL, new HiveDecimalWritable("10"),
          new HiveDecimalWritable("20.0"))
      .in("z", PredicateLeaf.Type.LONG, 1L, 2L, 3L)
      .nullSafeEquals("a", PredicateLeaf.Type.STRING, new HiveVarchar("stinger", 100).toString())
      .end()
      .end()
      .build();
  assertEquals("leaf-0 = (IS_NULL x), "
      + "leaf-1 = (BETWEEN y 10 20), "
      + "leaf-2 = (IN z 1 2 3), "
      + "leaf-3 = (NULL_SAFE_EQUALS a stinger), "
      + "expr = (and (not leaf-0) (not leaf-1) (not leaf-2) (not leaf-3))",
      negatedDisjunction.toString());
}
EqualityVerifier
// Verifies BuilderImpl.foldMaybe: YES_NO_NULL ("maybe") constants are dropped from
// AND operands, an AND of only maybes folds to the constant, and an OR containing a
// maybe folds entirely to maybe. Code left byte-identical: the exact constant
// expressions are the substance of the test.
@Test public void testFoldMaybe() throws Exception {
// Maybe operands are removed from AND (note the resulting single-child AND form).
assertEquals("(and leaf-1)",SearchArgumentImpl.BuilderImpl.foldMaybe(and(leaf(1),constant(TruthValue.YES_NO_NULL))).toString());
assertEquals("(and leaf-1 leaf-2)",SearchArgumentImpl.BuilderImpl.foldMaybe(and(leaf(1),constant(TruthValue.YES_NO_NULL),leaf(2))).toString());
assertEquals("(and leaf-1 leaf-2)",SearchArgumentImpl.BuilderImpl.foldMaybe(and(constant(TruthValue.YES_NO_NULL),leaf(1),leaf(2),constant(TruthValue.YES_NO_NULL))).toString());
// An AND of only maybes collapses to the maybe constant.
assertEquals("YES_NO_NULL",SearchArgumentImpl.BuilderImpl.foldMaybe(and(constant(TruthValue.YES_NO_NULL),constant(TruthValue.YES_NO_NULL))).toString());
// OR containing a maybe folds entirely to maybe.
assertEquals("YES_NO_NULL",SearchArgumentImpl.BuilderImpl.foldMaybe(or(leaf(1),constant(TruthValue.YES_NO_NULL))).toString());
// Folding applies recursively inside nested operands.
assertEquals("(or leaf-1 (and leaf-2))",SearchArgumentImpl.BuilderImpl.foldMaybe(or(leaf(1),and(leaf(2),constant(TruthValue.YES_NO_NULL)))).toString());
assertEquals("(and leaf-1)",SearchArgumentImpl.BuilderImpl.foldMaybe(and(or(leaf(2),constant(TruthValue.YES_NO_NULL)),leaf(1))).toString());
// CNF expansion of an oversized OR-of-ANDs yields a maybe, which is then dropped.
assertEquals("(and leaf-100)",SearchArgumentImpl.BuilderImpl.foldMaybe(SearchArgumentImpl.BuilderImpl.convertToCNF(and(leaf(100),or(and(leaf(0),leaf(1)),and(leaf(2),leaf(3)),and(leaf(4),leaf(5)),and(leaf(6),leaf(7)),and(leaf(8),leaf(9)),and(leaf(10),leaf(11)),and(leaf(12),leaf(13)),and(leaf(14),leaf(15)),and(leaf(16),leaf(17)))))).toString());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Round-trips a TIMESTAMP predicate leaf through Kryo serialization and verifies
// the deserialized literal is stored as a java.util.Date subtype while still
// comparing equal to the original Timestamp value.
@Test public void testTimestampSerialization() throws Exception {
  Timestamp now = new Timestamp(new java.util.Date().getTime());
  SearchArgument sarg = SearchArgumentFactory.newBuilder()
      .startAnd()
      .lessThan("x", PredicateLeaf.Type.TIMESTAMP, now)
      .end()
      .build();
  String serializedSarg = TestInputOutputFormat.toKryo(sarg);
  SearchArgument sarg2 = ConvertAstToSearchArg.create(serializedSarg);
  // Reflect on the private field to check the concrete representation of the literal.
  Field literalField = PredicateLeafImpl.class.getDeclaredField("literal");
  literalField.setAccessible(true);
  assertTrue(literalField.get(sarg2.getLeaves().get(0)) instanceof java.util.Date);
  Timestamp ts = (Timestamp) sarg2.getLeaves().get(0).getLiteral();
  // Fix: expected value first per the JUnit convention — the original had the
  // arguments reversed, which produces a misleading failure message.
  assertEquals(now, ts);
}
EqualityVerifier
// Verifies BuilderImpl.convertToCNF: distribution of OR over AND into conjunctive
// normal form, plus the safety valve that replaces an expansion that would be too
// large with the YES_NO_NULL constant. Code left byte-identical: the exact clause
// enumeration in each expected string is the substance of the test.
@Test public void testCNF() throws Exception {
// Leaves, constants, NOTs and pure ANDs are already in CNF.
assertEquals("leaf-1",SearchArgumentImpl.BuilderImpl.convertToCNF(leaf(1)).toString());
assertEquals("NO",SearchArgumentImpl.BuilderImpl.convertToCNF(constant(TruthValue.NO)).toString());
assertEquals("(not leaf-1)",SearchArgumentImpl.BuilderImpl.convertToCNF(not(leaf(1))).toString());
assertEquals("(and leaf-1 leaf-2)",SearchArgumentImpl.BuilderImpl.convertToCNF(and(leaf(1),leaf(2))).toString());
assertEquals("(or (not leaf-1) leaf-2)",SearchArgumentImpl.BuilderImpl.convertToCNF(or(not(leaf(1)),leaf(2))).toString());
assertEquals("(and (or leaf-1 leaf-2) (not leaf-3))",SearchArgumentImpl.BuilderImpl.convertToCNF(and(or(leaf(1),leaf(2)),not(leaf(3)))).toString());
// OR over AND distributes into the cross-product of clauses.
assertEquals("(and (or leaf-1 leaf-3) (or leaf-2 leaf-3)" + " (or leaf-1 leaf-4) (or leaf-2 leaf-4))",SearchArgumentImpl.BuilderImpl.convertToCNF(or(and(leaf(1),leaf(2)),and(leaf(3),leaf(4)))).toString());
assertEquals("(and" + " (or leaf-1 leaf-5) (or leaf-2 leaf-5)" + " (or leaf-3 leaf-5) (or leaf-4 leaf-5)"+ " (or leaf-1 leaf-6) (or leaf-2 leaf-6)"+ " (or leaf-3 leaf-6) (or leaf-4 leaf-6))",SearchArgumentImpl.BuilderImpl.convertToCNF(or(and(leaf(1),leaf(2),leaf(3),leaf(4)),and(leaf(5),leaf(6)))).toString());
assertEquals("(and" + " (or leaf-5 leaf-6 (not leaf-7) leaf-1 leaf-3)" + " (or leaf-5 leaf-6 (not leaf-7) leaf-2 leaf-3)"+ " (or leaf-5 leaf-6 (not leaf-7) leaf-1 leaf-4)"+ " (or leaf-5 leaf-6 (not leaf-7) leaf-2 leaf-4))",SearchArgumentImpl.BuilderImpl.convertToCNF(or(and(leaf(1),leaf(2)),and(leaf(3),leaf(4)),or(leaf(5),leaf(6)),not(leaf(7)))).toString());
assertEquals("(and" + " (or leaf-8 leaf-0 leaf-3 leaf-6)" + " (or leaf-8 leaf-1 leaf-3 leaf-6)"+ " (or leaf-8 leaf-2 leaf-3 leaf-6)"+ " (or leaf-8 leaf-0 leaf-4 leaf-6)"+ " (or leaf-8 leaf-1 leaf-4 leaf-6)"+ " (or leaf-8 leaf-2 leaf-4 leaf-6)"+ " (or leaf-8 leaf-0 leaf-5 leaf-6)"+ " (or leaf-8 leaf-1 leaf-5 leaf-6)"+ " (or leaf-8 leaf-2 leaf-5 leaf-6)"+ " (or leaf-8 leaf-0 leaf-3 leaf-7)"+ " (or leaf-8 leaf-1 leaf-3 leaf-7)"+ " (or leaf-8 leaf-2 leaf-3 leaf-7)"+ " (or leaf-8 leaf-0 leaf-4 leaf-7)"+ " (or leaf-8 leaf-1 leaf-4 leaf-7)"+ " (or leaf-8 leaf-2 leaf-4 leaf-7)"+ " (or leaf-8 leaf-0 leaf-5 leaf-7)"+ " (or leaf-8 leaf-1 leaf-5 leaf-7)"+ " (or leaf-8 leaf-2 leaf-5 leaf-7))",SearchArgumentImpl.BuilderImpl.convertToCNF(or(and(leaf(0),leaf(1),leaf(2)),and(leaf(3),leaf(4),leaf(5)),and(leaf(6),leaf(7)),leaf(8))).toString());
// When the expansion would blow up, the whole subtree degrades to YES_NO_NULL.
assertEquals("YES_NO_NULL",SearchArgumentImpl.BuilderImpl.convertToCNF(or(and(leaf(0),leaf(1)),and(leaf(2),leaf(3)),and(leaf(4),leaf(5)),and(leaf(6),leaf(7)),and(leaf(8),leaf(9)),and(leaf(10),leaf(11)),and(leaf(12),leaf(13)),and(leaf(14),leaf(15)),and(leaf(16),leaf(17)))).toString());
assertEquals("(and leaf-100 YES_NO_NULL)",SearchArgumentImpl.BuilderImpl.convertToCNF(and(leaf(100),or(and(leaf(0),leaf(1)),and(leaf(2),leaf(3)),and(leaf(4),leaf(5)),and(leaf(6),leaf(7)),and(leaf(8),leaf(9)),and(leaf(10),leaf(11)),and(leaf(12),leaf(13)),and(leaf(14),leaf(15)),and(leaf(16),leaf(17))))).toString());
// The CNF result must not share subtree instances (identity check).
assertNoSharedNodes(SearchArgumentImpl.BuilderImpl.convertToCNF(or(and(leaf(0),leaf(1),leaf(2)),and(leaf(3),leaf(4),leaf(5)),and(leaf(6),leaf(7)),leaf(8))),Sets.newIdentityHashSet());
}
EqualityVerifier
// FLOAT leaves accept Double literals; also checks that structurally similar leaves
// on distinct columns ("x"/"x1", "z"/"z1") remain separate entries.
@Test public void testBuilderFloat() throws Exception {
  SearchArgument sarg = SearchArgumentFactory.newBuilder()
      .startAnd()
      .lessThan("x", PredicateLeaf.Type.LONG, 22L)
      .lessThan("x1", PredicateLeaf.Type.LONG, 22L)
      .lessThanEquals("y", PredicateLeaf.Type.STRING, new HiveChar("hi", 10).toString())
      // Double.valueOf replaces the deprecated new Double(...) boxing constructor;
      // the boxed value (and equals/toString behavior) is identical.
      .equals("z", PredicateLeaf.Type.FLOAT, Double.valueOf(0.22))
      .equals("z1", PredicateLeaf.Type.FLOAT, Double.valueOf(0.22))
      .end()
      .build();
  assertEquals("leaf-0 = (LESS_THAN x 22), "
      + "leaf-1 = (LESS_THAN x1 22), "
      + "leaf-2 = (LESS_THAN_EQUALS y hi ), "
      + "leaf-3 = (EQUALS z 0.22), "
      + "leaf-4 = (EQUALS z1 0.22), "
      + "expr = (and leaf-0 leaf-1 leaf-2 leaf-3 leaf-4)", sarg.toString());
}
Class: org.apache.hadoop.hive.ql.lib.TestRuleRegExp UtilityVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// A rule pattern containing regex alternation should be accepted only by the
// wildcard validator; cost() must match TS%FIL% and TS%FS% stacks and reject a
// stack with no TableScan.
@Test public void testPatternWithWildCardChar(){
  RuleRegExp rule1 = new RuleRegExp("R1", "(" + TableScanOperator.getOperatorName() + "%"
      + FilterOperator.getOperatorName() + "%)|(" + TableScanOperator.getOperatorName() + "%"
      + FileSinkOperator.getOperatorName() + "%)");
  // Fix: expected value first per the JUnit convention — the original reversed the
  // arguments throughout this method, which yields misleading failure messages.
  assertEquals(false, rule1.rulePatternIsValidWithoutWildCardChar());
  assertEquals(true, rule1.rulePatternIsValidWithWildCardChar());
  Stack ns1 = new Stack();
  ns1.push(new TestNode(TableScanOperator.getOperatorName()));
  ns1.push(new TestNode(FilterOperator.getOperatorName()));
  Stack ns2 = new Stack();
  ns2.push(new TestNode(TableScanOperator.getOperatorName()));
  ns2.push(new TestNode(FileSinkOperator.getOperatorName()));
  try {
    // Each stack matches one alternative of the pattern, so cost is not -1.
    assertNotEquals(-1, rule1.cost(ns1));
    assertNotEquals(-1, rule1.cost(ns2));
  } catch (SemanticException e) {
    fail(e.getMessage());
  }
  Stack ns3 = new Stack();
  ns3.push(new TestNode(ReduceSinkOperator.getOperatorName()));
  ns3.push(new TestNode(ReduceSinkOperator.getOperatorName()));
  ns3.push(new TestNode(FileSinkOperator.getOperatorName()));
  try {
    // No TableScan in the stack: the rule must not match.
    assertEquals(-1, rule1.cost(ns3));
  } catch (SemanticException e) {
    fail(e.getMessage());
  }
}
UtilityVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// A plain (wildcard-free) rule pattern should be accepted only by the non-wildcard
// validator; a matching stack costs the pattern length, a non-matching one costs -1.
@Test public void testPatternWithoutWildCardChar(){
  String patternStr = ReduceSinkOperator.getOperatorName() + "%"
      + SelectOperator.getOperatorName() + "%" + FileSinkOperator.getOperatorName() + "%";
  RuleRegExp rule1 = new RuleRegExp("R1", patternStr);
  // Fix: expected value first per the JUnit convention — the original reversed the
  // arguments throughout this method.
  assertEquals(true, rule1.rulePatternIsValidWithoutWildCardChar());
  assertEquals(false, rule1.rulePatternIsValidWithWildCardChar());
  Stack ns1 = new Stack();
  ns1.push(new TestNode(ReduceSinkOperator.getOperatorName()));
  ns1.push(new TestNode(SelectOperator.getOperatorName()));
  ns1.push(new TestNode(FileSinkOperator.getOperatorName()));
  try {
    assertEquals(patternStr.length(), rule1.cost(ns1));
  } catch (SemanticException e) {
    fail(e.getMessage());
  }
  // BUG FIX: the original pushed the second and third nodes onto ns1 instead of ns2,
  // leaving ns2 with a single ReduceSink (and corrupting ns1 after its assertion).
  // Build ns2 properly; RS/TS/FS still does not match RS%SEL%FS%, so cost stays -1.
  Stack ns2 = new Stack();
  ns2.push(new TestNode(ReduceSinkOperator.getOperatorName()));
  ns2.push(new TestNode(TableScanOperator.getOperatorName()));
  ns2.push(new TestNode(FileSinkOperator.getOperatorName()));
  try {
    assertEquals(-1, rule1.cost(ns2));
  } catch (SemanticException e) {
    fail(e.getMessage());
  }
}
Class: org.apache.hadoop.hive.ql.lockmgr.TestDbTxnManager InternalCallVerifier EqualityVerifier
// INSERT into a single partition inside a transaction: expect one lock containing
// one component, with nothing left in the lock manager after commit.
@Test public void testSingleWritePartition() throws Exception {
  // Return value intentionally discarded: the call registers the partition as an
  // output of the mock plan (the original bound it to an unused local).
  addPartitionOutput(newTable(true), WriteEntity.WriteType.INSERT);
  QueryPlan qp = new MockQueryPlan(this);
  txnMgr.openTxn("fred");
  txnMgr.acquireLocks(qp, ctx, "fred");
  List locks = ctx.getHiveLocks();
  Assert.assertEquals(1, locks.size());
  Assert.assertEquals(1, TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
  txnMgr.commitTxn();
  // Commit must release every lock held by the transaction.
  locks = txnMgr.getLockManager().getLocks(false, false);
  Assert.assertEquals(0, locks.size());
}
InternalCallVerifier EqualityVerifier
// DELETE on a table inside a transaction: expect one lock with one component,
// released when the transaction commits.
@Test public void testDelete() throws Exception {
  // Return value intentionally discarded: the call registers the table as an output
  // (the original bound it to an unused local).
  addTableOutput(WriteEntity.WriteType.DELETE);
  QueryPlan qp = new MockQueryPlan(this);
  txnMgr.openTxn("fred");
  txnMgr.acquireLocks(qp, ctx, "fred");
  List locks = ctx.getHiveLocks();
  Assert.assertEquals(1, locks.size());
  Assert.assertEquals(1, TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
  txnMgr.commitTxn();
  // Commit must release every lock held by the transaction.
  locks = txnMgr.getLockManager().getLocks(false, false);
  Assert.assertEquals(0, locks.size());
}
InternalCallVerifier EqualityVerifier
// Reading one table outside an explicit transaction: one lock with a single
// component; explicitly unlocking it leaves the lock manager empty.
@Test public void testSingleReadTable() throws Exception {
  addTableInput();
  QueryPlan plan = new MockQueryPlan(this);
  txnMgr.acquireLocks(plan, ctx, "fred");
  List acquired = ctx.getHiveLocks();
  Assert.assertEquals(1, acquired.size());
  DbLockManager.DbHiveLock lock = (DbLockManager.DbHiveLock) acquired.get(0);
  Assert.assertEquals(1, TxnDbUtil.countLockComponents(lock.lockId));
  // Release and confirm nothing remains.
  txnMgr.getLockManager().unlock(acquired.get(0));
  Assert.assertEquals(0, txnMgr.getLockManager().getLocks(false, false).size());
}
InternalCallVerifier EqualityVerifier
// Reading one partition with a null user: one lock with a single component;
// explicitly unlocking it leaves the lock manager empty.
@Test public void testSingleReadPartition() throws Exception {
  addPartitionInput(newTable(true));
  QueryPlan plan = new MockQueryPlan(this);
  txnMgr.acquireLocks(plan, ctx, null);
  List acquired = ctx.getHiveLocks();
  Assert.assertEquals(1, acquired.size());
  DbLockManager.DbHiveLock lock = (DbLockManager.DbHiveLock) acquired.get(0);
  Assert.assertEquals(1, TxnDbUtil.countLockComponents(lock.lockId));
  // Release and confirm nothing remains.
  txnMgr.getLockManager().unlock(acquired.get(0));
  Assert.assertEquals(0, txnMgr.getLockManager().getLocks(false, false).size());
}
InternalCallVerifier EqualityVerifier
// DELETE inside a transaction that is rolled back: the single acquired lock must be
// released by the rollback just as it would be by a commit.
@Test public void testRollback() throws Exception {
  // Return value intentionally discarded: the call registers the table as an output
  // (the original bound it to an unused local).
  addTableOutput(WriteEntity.WriteType.DELETE);
  QueryPlan qp = new MockQueryPlan(this);
  txnMgr.openTxn("fred");
  txnMgr.acquireLocks(qp, ctx, "fred");
  List locks = ctx.getHiveLocks();
  Assert.assertEquals(1, locks.size());
  Assert.assertEquals(1, TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
  txnMgr.rollbackTxn();
  // Rollback must release every lock held by the transaction.
  locks = txnMgr.getLockManager().getLocks(false, false);
  Assert.assertEquals(0, locks.size());
}
InternalCallVerifier EqualityVerifier
/**
 * An UPDATE write inside a transaction takes one lock (one component);
 * committing the transaction releases it.
 */
@Test public void testUpdate() throws Exception {
WriteEntity we=addTableOutput(WriteEntity.WriteType.UPDATE);
QueryPlan qp=new MockQueryPlan(this);
txnMgr.openTxn("fred");
txnMgr.acquireLocks(qp,ctx,"fred");
List locks=ctx.getHiveLocks();
Assert.assertEquals(1,locks.size());
Assert.assertEquals(1,TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock)locks.get(0)).lockId));
txnMgr.commitTxn();
locks=txnMgr.getLockManager().getLocks(false,false);
Assert.assertEquals(0,locks.size());
}
InternalCallVerifier EqualityVerifier
/**
 * Reading three partitions of one table yields a single DbHiveLock that
 * carries three lock components (one per partition).
 */
@Test public void testSingleReadMultiPartition() throws Exception {
Table t=newTable(true);
addPartitionInput(t);
addPartitionInput(t);
addPartitionInput(t);
QueryPlan qp=new MockQueryPlan(this);
txnMgr.acquireLocks(qp,ctx,"fred");
List locks=ctx.getHiveLocks();
Assert.assertEquals(1,locks.size());
// Three components: one per partition read.
Assert.assertEquals(3,TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock)locks.get(0)).lockId));
txnMgr.getLockManager().unlock(locks.get(0));
locks=txnMgr.getLockManager().getLocks(false,false);
Assert.assertEquals(0,locks.size());
}
InternalCallVerifier EqualityVerifier
/**
 * A DDL_EXCLUSIVE write acquires one lock with one component without an
 * explicit open transaction; explicit unlock leaves the lock table empty.
 */
@Test public void testDDLExclusive() throws Exception {
WriteEntity we=addTableOutput(WriteEntity.WriteType.DDL_EXCLUSIVE);
QueryPlan qp=new MockQueryPlan(this);
txnMgr.acquireLocks(qp,ctx,"fred");
List locks=ctx.getHiveLocks();
Assert.assertEquals(1,locks.size());
Assert.assertEquals(1,TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock)locks.get(0)).lockId));
txnMgr.getLockManager().unlock(locks.get(0));
locks=txnMgr.getLockManager().getLocks(false,false);
Assert.assertEquals(0,locks.size());
}
InternalCallVerifier EqualityVerifier
/**
 * An INSERT write inside a transaction takes one lock (one component);
 * commit releases it.
 */
@Test public void testSingleWriteTable() throws Exception {
WriteEntity we=addTableOutput(WriteEntity.WriteType.INSERT);
QueryPlan qp=new MockQueryPlan(this);
txnMgr.openTxn("fred");
txnMgr.acquireLocks(qp,ctx,"fred");
List locks=ctx.getHiveLocks();
Assert.assertEquals(1,locks.size());
Assert.assertEquals(1,TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock)locks.get(0)).lockId));
txnMgr.commitTxn();
locks=txnMgr.getLockManager().getLocks(false,false);
Assert.assertEquals(0,locks.size());
}
InternalCallVerifier EqualityVerifier
/**
 * Sanity check of the acquire/release cycle through the txn manager's
 * releaseLocks API (rather than unlocking through the lock manager directly).
 */
@Test public void testLockAcquisitionAndRelease() throws Exception {
addTableInput();
QueryPlan qp=new MockQueryPlan(this);
txnMgr.acquireLocks(qp,ctx,"fred");
List locks=ctx.getHiveLocks();
Assert.assertEquals(1,locks.size());
txnMgr.releaseLocks(locks);
locks=txnMgr.getLockManager().getLocks(false,false);
Assert.assertEquals(0,locks.size());
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * A dynamic-partition INSERT locks at table granularity: the ShowLocks
 * entry has a table name but no partition name (partitions are unknown
 * until runtime).
 */
@Test public void testWriteDynamicPartition() throws Exception {
WriteEntity we=addDynamicPartitionedOutput(newTable(true),WriteEntity.WriteType.INSERT);
QueryPlan qp=new MockQueryPlan(this);
txnMgr.openTxn("fred");
txnMgr.acquireLocks(qp,ctx,"fred");
List locks=ctx.getHiveLocks();
Assert.assertEquals(1,locks.size());
ShowLocksResponse rsp=((DbLockManager)txnMgr.getLockManager()).getLocks();
List elms=rsp.getLocks();
Assert.assertEquals(1,elms.size());
// Table-level lock: table name present, partition name absent.
Assert.assertNotNull(elms.get(0).getTablename());
Assert.assertNull(elms.get(0).getPartname());
txnMgr.commitTxn();
locks=txnMgr.getLockManager().getLocks(false,false);
Assert.assertEquals(0,locks.size());
}
InternalCallVerifier EqualityVerifier
/**
 * Mixed read/write plan: three partition reads plus one table INSERT yield a
 * single lock with four components; commit releases everything.
 */
@Test public void testReadWrite() throws Exception {
Table t=newTable(true);
addPartitionInput(t);
addPartitionInput(t);
addPartitionInput(t);
WriteEntity we=addTableOutput(WriteEntity.WriteType.INSERT);
QueryPlan qp=new MockQueryPlan(this);
txnMgr.openTxn("fred");
txnMgr.acquireLocks(qp,ctx,"fred");
List locks=ctx.getHiveLocks();
Assert.assertEquals(1,locks.size());
// 3 read components + 1 write component.
Assert.assertEquals(4,TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock)locks.get(0)).lockId));
txnMgr.commitTxn();
locks=txnMgr.getLockManager().getLocks(false,false);
Assert.assertEquals(0,locks.size());
}
InternalCallVerifier EqualityVerifier
/**
 * A DDL_SHARED write acquires one lock with one component without an open
 * transaction; explicit unlock leaves the lock table empty.
 */
@Test public void testDDLShared() throws Exception {
WriteEntity we=addTableOutput(WriteEntity.WriteType.DDL_SHARED);
QueryPlan qp=new MockQueryPlan(this);
txnMgr.acquireLocks(qp,ctx,"fred");
List locks=ctx.getHiveLocks();
Assert.assertEquals(1,locks.size());
Assert.assertEquals(1,TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock)locks.get(0)).lockId));
txnMgr.getLockManager().unlock(locks.get(0));
locks=txnMgr.getLockManager().getLocks(false,false);
Assert.assertEquals(0,locks.size());
}
BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Exercises the txn heartbeat mechanism against the reaper in three phases:
 * 1) normal acquire + reap: heartbeats keep the txn alive, commit succeeds;
 * 2) acquire with a heartbeat delay of half the txn timeout: still alive,
 *    commit succeeds;
 * 3) acquire with heartbeating effectively disabled, then sleep past the
 *    txn timeout: the reaper aborts the txn and commit fails with TXN_ABORTED.
 */
@Test public void testHeartbeater() throws Exception {
Assert.assertTrue(txnMgr instanceof DbTxnManager);
addTableInput();
LockException exception=null;
QueryPlan qp=new MockQueryPlan(this);
// Phase 1: heartbeats running normally, commit must succeed despite reaper.
txnMgr.openTxn("fred");
txnMgr.acquireLocks(qp,ctx,"fred");
runReaper();
try {
txnMgr.commitTxn();
}
catch ( LockException e) {
exception=e;
}
Assert.assertNull("Txn commit should be successful",exception);
exception=null;
// Phase 2: delay the first heartbeat to half the timeout — still in time.
txnMgr.openTxn("tom");
((DbTxnManager)txnMgr).acquireLocksWithHeartbeatDelay(qp,ctx,"tom",HiveConf.getTimeVar(conf,HiveConf.ConfVars.HIVE_TXN_TIMEOUT,TimeUnit.MILLISECONDS) / 2);
runReaper();
try {
txnMgr.commitTxn();
}
catch ( LockException e) {
exception=e;
}
Assert.assertNull("Txn commit should also be successful",exception);
exception=null;
// Phase 3: sleep past the full txn timeout; reaper should abort the txn.
txnMgr.openTxn("jerry");
((DbTxnManager)txnMgr).acquireLocks(qp,ctx,"jerry",true);
Thread.sleep(HiveConf.getTimeVar(conf,HiveConf.ConfVars.HIVE_TXN_TIMEOUT,TimeUnit.MILLISECONDS));
runReaper();
try {
txnMgr.commitTxn();
}
catch ( LockException e) {
exception=e;
}
Assert.assertNotNull("Txn should have been aborted",exception);
Assert.assertEquals(ErrorMsg.TXN_ABORTED,exception.getCanonicalErrorMsg());
}
EqualityVerifier NullVerifier HybridVerifier
/**
 * Verifies error reporting after the reaper times out a transaction:
 * committing a reaped txn raises TXN_ABORTED, and rolling back a txn that
 * has already been cleaned up raises TXN_NO_SUCH_TRANSACTION.
 */
@Test public void testExceptions() throws Exception {
addPartitionOutput(newTable(true),WriteEntity.WriteType.INSERT);
QueryPlan qp=new MockQueryPlan(this);
txnMgr.openTxn("NicholasII");
// Let the txn time out, then run the reaper so it gets aborted.
Thread.sleep(HiveConf.getTimeVar(conf,HiveConf.ConfVars.HIVE_TXN_TIMEOUT,TimeUnit.MILLISECONDS));
runReaper();
LockException exception=null;
try {
txnMgr.commitTxn();
}
catch ( LockException ex) {
exception=ex;
}
Assert.assertNotNull("Expected exception1",exception);
Assert.assertEquals("Wrong Exception1",ErrorMsg.TXN_ABORTED,exception.getCanonicalErrorMsg());
exception=null;
txnMgr.openTxn("AlexanderIII");
Thread.sleep(HiveConf.getTimeVar(conf,HiveConf.ConfVars.HIVE_TXN_TIMEOUT,TimeUnit.MILLISECONDS));
runReaper();
try {
txnMgr.rollbackTxn();
}
catch ( LockException ex) {
exception=ex;
}
Assert.assertNotNull("Expected exception2",exception);
Assert.assertEquals("Wrong Exception2",ErrorMsg.TXN_NO_SUCH_TRANSACTION,exception.getCanonicalErrorMsg());
}
InternalCallVerifier EqualityVerifier
/**
 * A join reading three partitions of one table plus another whole table
 * produces one lock with four components.
 */
@Test public void testJoin() throws Exception {
Table t=newTable(true);
addPartitionInput(t);
addPartitionInput(t);
addPartitionInput(t);
addTableInput();
QueryPlan qp=new MockQueryPlan(this);
txnMgr.acquireLocks(qp,ctx,"fred");
List locks=ctx.getHiveLocks();
Assert.assertEquals(1,locks.size());
// 3 partition components + 1 table component.
Assert.assertEquals(4,TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock)locks.get(0)).lockId));
txnMgr.getLockManager().unlock(locks.get(0));
locks=txnMgr.getLockManager().getLocks(false,false);
Assert.assertEquals(0,locks.size());
}
Class: org.apache.hadoop.hive.ql.lockmgr.TestDbTxnManager2 EqualityVerifier
/**
 * INSERT OVERWRITE into a dynamically-partitioned table must take a
 * SHARED_READ lock on the source table and an EXCLUSIVE lock on the target.
 */
@Test public void insertOverwritePartitionedCreate() throws Exception {
CommandProcessorResponse cpr=driver.run("create table if not exists T4 (name string, gpa double) partitioned by (age int)");
checkCmdOnDriver(cpr);
cpr=driver.run("create table if not exists T5(name string, age int, gpa double)");
checkCmdOnDriver(cpr);
cpr=driver.compileAndRespond("INSERT OVERWRITE TABLE T4 PARTITION (age) SELECT name, age, gpa FROM T5");
checkCmdOnDriver(cpr);
txnMgr.acquireLocks(driver.getPlan(),ctx,"Fifer");
List locks=getLocks();
Assert.assertEquals("Unexpected lock count",2,locks.size());
checkLock(LockType.SHARED_READ,LockState.ACQUIRED,"default","T5",null,locks.get(0));
checkLock(LockType.EXCLUSIVE,LockState.ACQUIRED,"default","T4",null,locks.get(1));
txnMgr.getLockManager().releaseLocks(ctx.getHiveLocks());
Assert.assertEquals("Lock remained",0,getLocks().size());
// Clean up the tables this test created.
cpr=driver.run("drop table if exists T5");
checkCmdOnDriver(cpr);
cpr=driver.run("drop table if exists T4");
checkCmdOnDriver(cpr);
}
EqualityVerifier
/**
 * INSERT OVERWRITE must take a SHARED_READ lock on the source table (T2)
 * and an EXCLUSIVE lock on the target table (T3).
 *
 * Bug fix: the cleanup previously dropped T1 (never created by this test)
 * and left T3 behind, leaking state into subsequent tests. It now drops the
 * two tables this test actually created, T3 and T2.
 */
@Test public void insertOverwriteCreate() throws Exception {
CommandProcessorResponse cpr=driver.run("create table if not exists T2(a int)");
checkCmdOnDriver(cpr);
cpr=driver.run("create table if not exists T3(a int)");
checkCmdOnDriver(cpr);
cpr=driver.compileAndRespond("insert overwrite table T3 select a from T2");
checkCmdOnDriver(cpr);
txnMgr.acquireLocks(driver.getPlan(),ctx,"Fifer");
List locks=getLocks();
Assert.assertEquals("Unexpected lock count",2,locks.size());
checkLock(LockType.SHARED_READ,LockState.ACQUIRED,"default","T2",null,locks.get(0));
checkLock(LockType.EXCLUSIVE,LockState.ACQUIRED,"default","T3",null,locks.get(1));
txnMgr.getLockManager().releaseLocks(ctx.getHiveLocks());
Assert.assertEquals("Lock remained",0,getLocks().size());
// Drop the tables created above (was T1/T2, which leaked T3).
cpr=driver.run("drop table if exists T3");
checkCmdOnDriver(cpr);
cpr=driver.run("drop table if exists T2");
checkCmdOnDriver(cpr);
}
EqualityVerifier
/**
 * A database-level EXCLUSIVE lock (drop database) must wait behind a
 * table-level SHARED_WRITE lock (update) in that database, and be granted
 * once the write lock is released.
 *
 * Fix: the compileAndRespond result for the drop-database statement was
 * never checked; every sibling test validates it via checkCmdOnDriver.
 */
@Test public void lockConflictDbTable() throws Exception {
CommandProcessorResponse cpr=driver.run("create database if not exists temp");
checkCmdOnDriver(cpr);
cpr=driver.run("create table if not exists temp.T7(a int, b int) clustered by(b) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')");
checkCmdOnDriver(cpr);
cpr=driver.compileAndRespond("update temp.T7 set a = 5 where b = 6");
checkCmdOnDriver(cpr);
txnMgr.acquireLocks(driver.getPlan(),ctx,"Fifer");
List updateLocks=ctx.getHiveLocks();
cpr=driver.compileAndRespond("drop database if exists temp");
checkCmdOnDriver(cpr);
// isBlocking=false: the EXCLUSIVE db lock should enqueue in WAITING state.
LockState lockState=((DbTxnManager)txnMgr).acquireLocks(driver.getPlan(),ctx,"Fiddler",false);
List locks=getLocks();
Assert.assertEquals("Unexpected lock count",2,locks.size());
checkLock(LockType.SHARED_WRITE,LockState.ACQUIRED,"temp","T7",null,locks.get(0));
checkLock(LockType.EXCLUSIVE,LockState.WAITING,"temp",null,null,locks.get(1));
// Release the blocking write lock; the waiting lock should now be granted.
txnMgr.getLockManager().releaseLocks(updateLocks);
lockState=((DbLockManager)txnMgr.getLockManager()).checkLock(locks.get(1).getLockid());
locks=getLocks();
Assert.assertEquals("Unexpected lock count",1,locks.size());
checkLock(LockType.EXCLUSIVE,LockState.ACQUIRED,"temp",null,null,locks.get(0));
List xLock=new ArrayList(1);
xLock.add(new DbLockManager.DbHiveLock(locks.get(0).getLockid()));
txnMgr.getLockManager().releaseLocks(xLock);
}
EqualityVerifier
/**
 * CREATE TABLE takes a SHARED_READ lock on the target database (no table
 * exists yet to lock).
 */
@Test public void createTable() throws Exception {
CommandProcessorResponse cpr=driver.compileAndRespond("create table if not exists T (a int, b int)");
checkCmdOnDriver(cpr);
txnMgr.acquireLocks(driver.getPlan(),ctx,"Fifer");
List locks=getLocks();
Assert.assertEquals("Unexpected lock count",1,locks.size());
// Database-level lock: table and partition fields are null.
checkLock(LockType.SHARED_READ,LockState.ACQUIRED,"default",null,null,locks.get(0));
txnMgr.getLockManager().releaseLocks(ctx.getHiveLocks());
Assert.assertEquals("Lock remained",0,getLocks().size());
}
EqualityVerifier
/**
 * Two SHARED_WRITE operations on the same ACID table conflict: the second
 * update WAITs behind the first delete while a concurrent SHARED_READ select
 * is unaffected. Releasing the delete's locks lets the update proceed.
 *
 * Bug fix: the cleanup previously ran "drop table if exists T6" — T6 is not
 * the table this test created, so T8 leaked into subsequent tests. It now
 * drops T8.
 */
@Test public void updateSelectUpdate() throws Exception {
CommandProcessorResponse cpr=driver.run("create table T8(a int, b int) clustered by(b) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')");
checkCmdOnDriver(cpr);
cpr=driver.compileAndRespond("delete from T8 where b = 89");
checkCmdOnDriver(cpr);
txnMgr.acquireLocks(driver.getPlan(),ctx,"Fifer");
List deleteLocks=ctx.getHiveLocks();
cpr=driver.compileAndRespond("select a from T8");
checkCmdOnDriver(cpr);
txnMgr.acquireLocks(driver.getPlan(),ctx,"Fiddler");
cpr=driver.compileAndRespond("update T8 set a = 1 where b = 1");
checkCmdOnDriver(cpr);
// isBlocking=false: the second SHARED_WRITE should enqueue as WAITING.
LockState lockState=((DbTxnManager)txnMgr).acquireLocks(driver.getPlan(),ctx,"Practical",false);
List locks=getLocks();
Assert.assertEquals("Unexpected lock count",3,locks.size());
checkLock(LockType.SHARED_READ,LockState.ACQUIRED,"default","T8",null,locks.get(0));
checkLock(LockType.SHARED_WRITE,LockState.ACQUIRED,"default","T8",null,locks.get(1));
checkLock(LockType.SHARED_WRITE,LockState.WAITING,"default","T8",null,locks.get(2));
// Release the delete's locks; the waiting update should be granted.
txnMgr.getLockManager().releaseLocks(deleteLocks);
lockState=((DbLockManager)txnMgr.getLockManager()).checkLock(locks.get(2).getLockid());
locks=getLocks();
Assert.assertEquals("Unexpected lock count",2,locks.size());
checkLock(LockType.SHARED_READ,LockState.ACQUIRED,"default","T8",null,locks.get(0));
checkLock(LockType.SHARED_WRITE,LockState.ACQUIRED,"default","T8",null,locks.get(1));
List relLocks=new ArrayList(2);
relLocks.add(new DbLockManager.DbHiveLock(locks.get(0).getLockid()));
relLocks.add(new DbLockManager.DbHiveLock(locks.get(1).getLockid()));
txnMgr.getLockManager().releaseLocks(relLocks);
// Drop the table this test created (was erroneously T6).
cpr=driver.run("drop table if exists T8");
locks=getLocks();
Assert.assertEquals("Unexpected number of locks found",0,locks.size());
checkCmdOnDriver(cpr);
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * When the session's txn manager is switched to DummyTxnManager (non-ACID),
 * every statement touching an ACID table must be rejected at compile time
 * with the appropriate error code and message.
 */
@Test public void testDummyTxnManagerOnAcidTable() throws Exception {
CommandProcessorResponse cpr=driver.run("create table T10 (a int, b int) clustered by(b) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')");
checkCmdOnDriver(cpr);
cpr=driver.run("create table T11 (a int, b int) clustered by(b) into 2 buckets stored as orc");
checkCmdOnDriver(cpr);
// Swap in the non-ACID txn manager for the rest of this test.
conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER,"org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager");
txnMgr=SessionState.get().initTxnMgr(conf);
Assert.assertTrue(txnMgr instanceof DummyTxnManager);
// SELECT on an ACID table: rejected.
cpr=driver.compileAndRespond("select * from T10");
Assert.assertEquals(ErrorMsg.TXNMGR_NOT_ACID.getErrorCode(),cpr.getResponseCode());
Assert.assertTrue(cpr.getErrorMessage().contains("This command is not allowed on an ACID table"));
// INSERT INTO an ACID table: rejected.
cpr=driver.compileAndRespond("insert into table T10 values (1, 2)");
Assert.assertEquals(ErrorMsg.TXNMGR_NOT_ACID.getErrorCode(),cpr.getResponseCode());
Assert.assertTrue(cpr.getErrorMessage().contains("This command is not allowed on an ACID table"));
// INSERT OVERWRITE into an ACID table: different error path.
cpr=driver.compileAndRespond("insert overwrite table T10 select a, b from T11");
Assert.assertEquals(ErrorMsg.NO_INSERT_OVERWRITE_WITH_ACID.getErrorCode(),cpr.getResponseCode());
Assert.assertTrue(cpr.getErrorMessage().contains("INSERT OVERWRITE not allowed on table with OutputFormat" + " that implements AcidOutputFormat while transaction manager that supports ACID is in use"));
// UPDATE / DELETE require an ACID txn manager.
cpr=driver.compileAndRespond("update T10 set a=0 where b=1");
Assert.assertEquals(ErrorMsg.ACID_OP_ON_NONACID_TXNMGR.getErrorCode(),cpr.getResponseCode());
Assert.assertTrue(cpr.getErrorMessage().contains("Attempt to do update or delete using transaction manager that does not support these operations."));
cpr=driver.compileAndRespond("delete from T10");
Assert.assertEquals(ErrorMsg.ACID_OP_ON_NONACID_TXNMGR.getErrorCode(),cpr.getResponseCode());
Assert.assertTrue(cpr.getErrorMessage().contains("Attempt to do update or delete using transaction manager that does not support these operations."));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * With the retry limit set to 2, a second txn manager attempting to take an
 * EXCLUSIVE lock (drop table) that conflicts with an existing SHARED_READ
 * must time out with LOCK_ACQUIRE_TIMEDOUT, leaving the original lock intact.
 */
@Test public void testLockRetryLimit() throws Exception {
conf.setIntVar(HiveConf.ConfVars.HIVE_LOCK_NUMRETRIES,2);
conf.setBoolVar(HiveConf.ConfVars.TXN_MGR_DUMP_LOCK_STATE_ON_ACQUIRE_TIMEOUT,true);
// Separate txn manager to simulate a second, competing session.
HiveTxnManager otherTxnMgr=new DbTxnManager();
((DbTxnManager)otherTxnMgr).setHiveConf(conf);
CommandProcessorResponse cpr=driver.run("create table T9(a int)");
checkCmdOnDriver(cpr);
cpr=driver.compileAndRespond("select * from T9");
checkCmdOnDriver(cpr);
txnMgr.acquireLocks(driver.getPlan(),ctx,"Vincent Vega");
List locks=getLocks(txnMgr);
Assert.assertEquals("Unexpected lock count",1,locks.size());
checkLock(LockType.SHARED_READ,LockState.ACQUIRED,"default","T9",null,locks.get(0));
cpr=driver.compileAndRespond("drop table T9");
checkCmdOnDriver(cpr);
try {
otherTxnMgr.acquireLocks(driver.getPlan(),ctx,"Winston Winnfield");
}
catch ( LockException ex) {
Assert.assertEquals("Got wrong lock exception",ErrorMsg.LOCK_ACQUIRE_TIMEDOUT,ex.getCanonicalErrorMsg());
}
// The original SHARED_READ lock must still be held after the timeout.
locks=getLocks(txnMgr);
Assert.assertEquals("Unexpected lock count",1,locks.size());
checkLock(LockType.SHARED_READ,LockState.ACQUIRED,"default","T9",null,locks.get(0));
otherTxnMgr.closeTxnManager();
}
EqualityVerifier
/**
 * Basic lock-blocking scenario: an EXCLUSIVE lock (drop table) enqueues as
 * WAITING behind an ACQUIRED SHARED_READ (select) on the same table, and is
 * granted once the read lock is released.
 */
@Test public void basicBlocking() throws Exception {
CommandProcessorResponse cpr=driver.run("create table if not exists T6(a int)");
checkCmdOnDriver(cpr);
cpr=driver.compileAndRespond("select a from T6");
checkCmdOnDriver(cpr);
txnMgr.acquireLocks(driver.getPlan(),ctx,"Fifer");
List selectLocks=ctx.getHiveLocks();
cpr=driver.compileAndRespond("drop table if exists T6");
checkCmdOnDriver(cpr);
// isBlocking=false so the EXCLUSIVE request enqueues instead of blocking.
LockState lockState=((DbTxnManager)txnMgr).acquireLocks(driver.getPlan(),ctx,"Fiddler",false);
List locks=getLocks();
Assert.assertEquals("Unexpected lock count",2,locks.size());
checkLock(LockType.SHARED_READ,LockState.ACQUIRED,"default","T6",null,locks.get(0));
checkLock(LockType.EXCLUSIVE,LockState.WAITING,"default","T6",null,locks.get(1));
// Release the read lock; the waiting EXCLUSIVE should now be granted.
txnMgr.getLockManager().releaseLocks(selectLocks);
lockState=((DbLockManager)txnMgr.getLockManager()).checkLock(locks.get(1).getLockid());
locks=getLocks();
Assert.assertEquals("Unexpected lock count",1,locks.size());
checkLock(LockType.EXCLUSIVE,LockState.ACQUIRED,"default","T6",null,locks.get(0));
List xLock=new ArrayList(0);
xLock.add(new DbLockManager.DbHiveLock(locks.get(0).getLockid()));
txnMgr.getLockManager().releaseLocks(xLock);
cpr=driver.run("drop table if exists T6");
locks=getLocks();
Assert.assertEquals("Unexpected number of locks found",0,locks.size());
checkCmdOnDriver(cpr);
}
Class: org.apache.hadoop.hive.ql.lockmgr.TestDummyTxnManager InternalCallVerifier EqualityVerifier
/**
 * Verifies the current database object is not locked if the table read is
 * against a different database: after acquiring locks for a read of
 * default.table1 while the session database is db1, the session database is
 * untouched and exactly two SHARED locks (db "default" and "default/table1")
 * are requested from the lock manager.
 *
 * Bug fix: the fourth lock assertion previously re-checked resultLocks.get(0)
 * against expectedLocks.get(0) — a duplicate of the line above it — so the
 * second lock's object name was never verified. It now compares index 1.
 * @throws Exception
 */
@Test public void testSingleReadTable() throws Exception {
SessionState.get().setCurrentDatabase("db1");
List expectedLocks=new ArrayList();
expectedLocks.add(new ZooKeeperHiveLock("default",new HiveLockObject(),HiveLockMode.SHARED));
expectedLocks.add(new ZooKeeperHiveLock("default.table1",new HiveLockObject(),HiveLockMode.SHARED));
when(mockLockManager.lock(anyListOf(HiveLockObj.class),eq(false))).thenReturn(expectedLocks);
doNothing().when(mockLockManager).setContext(any(HiveLockManagerCtx.class));
doNothing().when(mockLockManager).close();
ArgumentCaptor lockObjsCaptor=ArgumentCaptor.forClass(List.class);
when(mockQueryPlan.getInputs()).thenReturn(createReadEntities());
when(mockQueryPlan.getOutputs()).thenReturn(new HashSet());
txnMgr.acquireLocks(mockQueryPlan,ctx,"fred");
// Reading default.table1 must not change the session's current database.
Assert.assertEquals("db1",SessionState.get().getCurrentDatabase());
List resultLocks=ctx.getHiveLocks();
Assert.assertEquals(expectedLocks.size(),resultLocks.size());
Assert.assertEquals(expectedLocks.get(0).getHiveLockMode(),resultLocks.get(0).getHiveLockMode());
Assert.assertEquals(expectedLocks.get(0).getHiveLockObject().getName(),resultLocks.get(0).getHiveLockObject().getName());
Assert.assertEquals(expectedLocks.get(1).getHiveLockMode(),resultLocks.get(1).getHiveLockMode());
// Was a copy-paste duplicate of the index-0 check; verify the second lock.
Assert.assertEquals(expectedLocks.get(1).getHiveLockObject().getName(),resultLocks.get(1).getHiveLockObject().getName());
verify(mockLockManager).lock(lockObjsCaptor.capture(),eq(false));
List lockObjs=lockObjsCaptor.getValue();
Assert.assertEquals(2,lockObjs.size());
Assert.assertEquals("default",lockObjs.get(0).getName());
Assert.assertEquals(HiveLockMode.SHARED,lockObjs.get(0).mode);
Assert.assertEquals("default/table1",lockObjs.get(1).getName());
Assert.assertEquals(HiveLockMode.SHARED,lockObjs.get(1).mode);
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * dedupLockObjects must collapse duplicate lock paths, keeping the strongest
 * mode per path: path1 appears as SHARED then EXCLUSIVE (EXCLUSIVE wins),
 * path2 appears three times as SHARED (stays SHARED).
 */
@Test public void testDedupLockObjects(){
List lockObjs=new ArrayList();
String path1="path1";
String path2="path2";
HiveLockObjectData lockData1=new HiveLockObjectData("query1","1","IMPLICIT","drop table table1");
HiveLockObjectData lockData2=new HiveLockObjectData("query1","1","IMPLICIT","drop table table1");
lockObjs.add(new HiveLockObj(new HiveLockObject(path1,lockData1),HiveLockMode.SHARED));
String name1=lockObjs.get(lockObjs.size() - 1).getName();
lockObjs.add(new HiveLockObj(new HiveLockObject(path1,lockData1),HiveLockMode.EXCLUSIVE));
lockObjs.add(new HiveLockObj(new HiveLockObject(path2,lockData2),HiveLockMode.SHARED));
String name2=lockObjs.get(lockObjs.size() - 1).getName();
lockObjs.add(new HiveLockObj(new HiveLockObject(path2,lockData2),HiveLockMode.SHARED));
lockObjs.add(new HiveLockObj(new HiveLockObject(path2,lockData2),HiveLockMode.SHARED));
DummyTxnManager.dedupLockObjects(lockObjs);
Assert.assertEquals("Locks should be deduped",2,lockObjs.size());
// Sort by name so the two remaining locks can be checked positionally.
Comparator cmp=new Comparator(){
@Override public int compare( HiveLockObj lock1, HiveLockObj lock2){
return lock1.getName().compareTo(lock2.getName());
}
}
;
Collections.sort(lockObjs,cmp);
HiveLockObj lockObj=lockObjs.get(0);
Assert.assertEquals(name1,lockObj.getName());
// EXCLUSIVE must win over the earlier SHARED entry for the same path.
Assert.assertEquals(HiveLockMode.EXCLUSIVE,lockObj.getMode());
lockObj=lockObjs.get(1);
Assert.assertEquals(name2,lockObj.getName());
Assert.assertEquals(HiveLockMode.SHARED,lockObj.getMode());
}
Class: org.apache.hadoop.hive.ql.lockmgr.TestHiveLockObject InternalCallVerifier EqualityVerifier
/**
 * Two HiveLockObjectData (and HiveLockObject) instances built from identical
 * fields must be equal and produce equal hash codes (equals/hashCode contract).
 */
@Test public void testEqualsAndHashCode(){
HiveLockObjectData data1=new HiveLockObjectData("ID1","SHARED","1997-07-01","select * from mytable");
HiveLockObjectData data2=new HiveLockObjectData("ID1","SHARED","1997-07-01","select * from mytable");
Assert.assertEquals(data1,data2);
Assert.assertEquals(data1.hashCode(),data2.hashCode());
HiveLockObject obj1=new HiveLockObject("mytable",data1);
HiveLockObject obj2=new HiveLockObject("mytable",data2);
Assert.assertEquals(obj1,obj2);
Assert.assertEquals(obj1.hashCode(),obj2.hashCode());
}
Class: org.apache.hadoop.hive.ql.lockmgr.zookeeper.TestZookeeperLockManager InternalCallVerifier EqualityVerifier
/**
 * getQuorumServers appends the configured client port to each quorum host
 * that does not already carry an explicit port.
 */
@Test public void testGetQuorumServers(){
// Single host, no port: default port appended.
conf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_QUORUM,"node1");
conf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_CLIENT_PORT,"9999");
Assert.assertEquals("node1:9999",ZooKeeperHiveHelper.getQuorumServers(conf));
// Multiple hosts, no ports: default port appended to each.
conf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_QUORUM,"node1,node2,node3");
conf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_CLIENT_PORT,"9999");
Assert.assertEquals("node1:9999,node2:9999,node3:9999",ZooKeeperHiveHelper.getQuorumServers(conf));
// Explicit per-host port is preserved; others get the default.
conf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_QUORUM,"node1:5666,node2,node3");
conf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_CLIENT_PORT,"9999");
Assert.assertEquals("node1:5666,node2:9999,node3:9999",ZooKeeperHiveHelper.getQuorumServers(conf));
}
APIUtilityVerifier UtilityVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * unlockPrimitive on a lock node with no siblings must delete both the lock
 * node and its (now childless) parent; subsequent reads of either path must
 * fail with KeeperException.NoNodeException.
 *
 * Fix: replaced the `Assert.assertEquals(e instanceof X, true)` anti-idiom
 * with `Assert.assertTrue(e instanceof X)`, and dropped the dead
 * reassignments of `data` in the expect-failure blocks.
 */
@Test public void testDeleteNoChildren() throws Exception {
client.create().creatingParentsIfNeeded().forPath(TABLE_LOCK_PATH,lockObjData.toString().getBytes());
byte[] data=client.getData().forPath(TABLE_LOCK_PATH);
Assert.assertArrayEquals(lockObjData.toString().getBytes(),data);
ZooKeeperHiveLockManager.unlockPrimitive(zLock,PARENT,client);
// The lock node itself must be gone.
try {
client.getData().forPath(TABLE_LOCK_PATH);
Assert.fail();
}
catch ( Exception e) {
Assert.assertTrue(e instanceof KeeperException.NoNodeException);
}
// The childless parent node must have been removed as well.
try {
client.getData().forPath(PARENT_LOCK_PATH);
Assert.fail();
}
catch ( Exception e) {
Assert.assertTrue(e instanceof KeeperException.NoNodeException);
}
}
Class: org.apache.hadoop.hive.ql.log.TestLog4j2Appenders InternalCallVerifier EqualityVerifier
/**
 * The HiveEventCounter appender must count log events per level:
 * 4 INFO, 3 ERROR, 2 WARN, 1 FATAL messages are emitted and the counters
 * are checked accordingly.
 */
@Test public void testHiveEventCounterAppender() throws Exception {
Logger logger=LogManager.getRootLogger();
HiveEventCounter appender=HiveEventCounter.createInstance("EventCounter",true,null,null);
appender.addToLogger(logger.getName(),Level.INFO);
appender.start();
logger.info("Test");
logger.info("Test");
logger.info("Test");
logger.info("Test");
logger.error("Test");
logger.error("Test");
logger.error("Test");
logger.warn("Test");
logger.warn("Test");
logger.fatal("Test");
assertEquals(4,appender.getInfo());
assertEquals(3,appender.getError());
assertEquals(2,appender.getWarn());
assertEquals(1,appender.getFatal());
// Detach so the counter does not leak into other tests.
appender.removeFromLogger(LogManager.getRootLogger().getName());
}
InternalCallVerifier EqualityVerifier
/**
 * The StringAppender with a bare "%m" pattern must capture message text
 * verbatim, concatenating consecutive log calls with no separators.
 */
@Test public void testStringAppender() throws Exception {
Logger logger=LogManager.getRootLogger();
StringAppender appender=StringAppender.createStringAppender("%m");
appender.addToLogger(logger.getName(),Level.INFO);
appender.start();
logger.info("Hello!");
logger.info(" World");
assertEquals("Hello! World",appender.getOutput());
// Detach so the appender does not leak into other tests.
appender.removeFromLogger(LogManager.getRootLogger().getName());
}
Class: org.apache.hadoop.hive.ql.optimizer.TestGenMapRedUtilsUsePartitionColumnsPositive APIUtilityVerifier EqualityVerifier
/**
 * usePartitionColumns must rewrite the partition-column name/type properties
 * in place so that only the requested columns (and their types) remain.
 * Inputs and expected values come from test fixture fields.
 */
@Test public void testUsePartitionColumns(){
Properties p=new Properties();
p.setProperty(hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS,inPartColNames);
p.setProperty(hive_metastoreConstants.META_TABLE_PARTITION_COLUMN_TYPES,inPartColTypes);
GenMapRedUtils.usePartitionColumns(p,Arrays.asList(partNamesToRetain.split(",")));
String actualNames=p.getProperty(hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS);
String actualTypes=p.getProperty(hive_metastoreConstants.META_TABLE_PARTITION_COLUMN_TYPES);
assertEquals(expectedPartColNames,actualNames);
assertEquals(expectedPartColTypes,actualTypes);
}
Class: org.apache.hadoop.hive.ql.optimizer.calcite.TestCBORuleFiredOnlyOnce EqualityVerifier
/**
 * With the HiveRulesRegistry tracking rule applications, a HepPlanner run
 * may match DummyRule multiple times (3 here) but must invoke onMatch only
 * once per rule/node pair.
 */
@Test public void testRuleFiredOnlyOnce(){
HiveConf conf=new HiveConf();
// Top-down Hep program containing only the dummy rule under test.
HepProgramBuilder programBuilder=new HepProgramBuilder();
programBuilder.addMatchOrder(HepMatchOrder.TOP_DOWN);
programBuilder=programBuilder.addRuleCollection(ImmutableList.of(DummyRule.INSTANCE));
// Registry is what enforces the fire-once behavior being tested.
HiveRulesRegistry registry=new HiveRulesRegistry();
HivePlannerContext context=new HivePlannerContext(null,registry);
HepPlanner planner=new HepPlanner(programBuilder.build(),context);
RexBuilder rexBuilder=new RexBuilder(new JavaTypeFactoryImpl());
RelOptCluster cluster=RelOptCluster.create(planner,rexBuilder);
HiveDefaultRelMetadataProvider mdProvider=new HiveDefaultRelMetadataProvider(conf);
List list=Lists.newArrayList();
list.add(mdProvider.getMetadataProvider());
planner.registerMetadataProviders(list);
RelMetadataProvider chainedProvider=ChainedRelMetadataProvider.of(list);
final RelNode node=new DummyNode(cluster,cluster.traitSet());
node.getCluster().setMetadataProvider(new CachingRelMetadataProvider(chainedProvider,planner));
planner.setRoot(node);
planner.findBestExp();
// Matched three times, but applied (onMatch) only once.
assertEquals(3,DummyRule.INSTANCE.numberMatches);
assertEquals(1,DummyRule.INSTANCE.numberOnMatch);
}
Class: org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.TestDynamicMultiDimeCollection APIUtilityVerifier EqualityVerifier
/**
 * flat() of two skewed-value dimensions {1,2} x {3,4} must produce the
 * cartesian product in order: (1,3),(1,4),(2,3),(2,4).
 * NOTE(review): generic type parameters in this block appear stripped by
 * extraction (e.g. "List>"); code left byte-identical.
 */
@Test public void testFlat1() throws SemanticException {
List> uniqSkewedElements=new ArrayList>();
List v1=Arrays.asList("1","2");
List v2=Arrays.asList("3","4");
uniqSkewedElements.add(v1);
uniqSkewedElements.add(v2);
List> actual=ListBucketingPruner.DynamicMultiDimensionalCollection.flat(uniqSkewedElements);
List> expected=new ArrayList>();
v1=Arrays.asList("1","3");
v2=Arrays.asList("1","4");
List v3=Arrays.asList("2","3");
List v4=Arrays.asList("2","4");
expected.add(v1);
expected.add(v2);
expected.add(v3);
expected.add(v4);
Assert.assertEquals(expected,actual);
}
APIUtilityVerifier EqualityVerifier
/**
 * uniqueElementsList() on a single 3-column skewed value ("1","a","x") must
 * produce, per column, the unique values plus the default-dir marker.
 * NOTE(review): generic type parameters appear stripped by extraction;
 * code left byte-identical.
 */
@Test public void testUniqueElementsList2(){
List> values=new ArrayList>();
List v1=Arrays.asList("1","a","x");
values.add(v1);
List> actuals=ListBucketingPruner.DynamicMultiDimensionalCollection.uniqueElementsList(values,DEF_DIR);
List> expecteds=new ArrayList>();
v1=Arrays.asList("1","default");
List v2=Arrays.asList("a","default");
List v3=Arrays.asList("x","default");
expecteds.add(v1);
expecteds.add(v2);
expecteds.add(v3);
Assert.assertEquals(expecteds,actuals);
}
APIUtilityVerifier EqualityVerifier
/**
 * uniqueElementsList() on four single-column values must collapse to one
 * column list containing all four values plus the default-dir marker.
 * NOTE(review): generic type parameters appear stripped by extraction;
 * code left byte-identical.
 */
@Test public void testUniqueElementsList3(){
List> values=new ArrayList>();
List v1=Arrays.asList("1");
List v2=Arrays.asList("2");
List v3=Arrays.asList("3");
List v4=Arrays.asList("4");
values.add(v1);
values.add(v2);
values.add(v3);
values.add(v4);
List> actuals=ListBucketingPruner.DynamicMultiDimensionalCollection.uniqueElementsList(values,DEF_DIR);
List> expecteds=new ArrayList>();
v1=Arrays.asList("1","2","3","4","default");
expecteds.add(v1);
Assert.assertEquals(expecteds,actuals);
}
APIUtilityVerifier EqualityVerifier
/**
 * uniqueElementsList() on four 3-column skewed values must return, per
 * column, the distinct values (deduplicated, insertion order) plus the
 * default-dir marker.
 * NOTE(review): generic type parameters appear stripped by extraction;
 * code left byte-identical.
 */
@Test public void testUniqueElementsList1(){
List> values=new ArrayList>();
List v1=Arrays.asList("1","a","x");
List v2=Arrays.asList("2","b","x");
List v3=Arrays.asList("1","c","x");
List v4=Arrays.asList("2","a","y");
values.add(v1);
values.add(v2);
values.add(v3);
values.add(v4);
List> actuals=ListBucketingPruner.DynamicMultiDimensionalCollection.uniqueElementsList(values,DEF_DIR);
List> expecteds=new ArrayList>();
v1=Arrays.asList("1","2","default");
v2=Arrays.asList("a","b","c","default");
v3=Arrays.asList("x","y","default");
expecteds.add(v1);
expecteds.add(v2);
expecteds.add(v3);
Assert.assertEquals(expecteds,actuals);
}
APIUtilityVerifier EqualityVerifier
/**
 * flat() of a single dimension {1,2} must produce the two singleton
 * combinations (1) and (2).
 * NOTE(review): generic type parameters appear stripped by extraction;
 * code left byte-identical.
 */
@Test public void testFlat2() throws SemanticException {
List> uniqSkewedElements=new ArrayList>();
List v1=Arrays.asList("1","2");
uniqSkewedElements.add(v1);
List> actual=ListBucketingPruner.DynamicMultiDimensionalCollection.flat(uniqSkewedElements);
List> expected=new ArrayList>();
v1=Arrays.asList("1");
List v2=Arrays.asList("2");
expected.add(v1);
expected.add(v2);
Assert.assertEquals(expected,actual);
}
Class: org.apache.hadoop.hive.ql.optimizer.physical.TestVectorizer BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Vectorizing a GROUP BY with sum(abs(col1)) keyed on col2 must validate,
 * select the VectorUDAFSumLong aggregator, and vectorize the inner abs()
 * UDF as FuncAbsLongToLong.
 */
@Test public void testAggregateOnUDF() throws HiveException {
// sum(...) aggregation whose parameter is the abs(col1) expression below.
AggregationDesc aggDesc=new AggregationDesc();
aggDesc.setGenericUDAFName("sum");
ExprNodeGenericFuncDesc exprNodeDesc=new ExprNodeGenericFuncDesc();
exprNodeDesc.setTypeInfo(TypeInfoFactory.intTypeInfo);
ArrayList params=new ArrayList();
params.add(exprNodeDesc);
aggDesc.setParameters(params);
GenericUDFAbs absUdf=new GenericUDFAbs();
exprNodeDesc.setGenericUDF(absUdf);
List children=new ArrayList();
ExprNodeColumnDesc colExprA=new ExprNodeColumnDesc(Integer.class,"col1","T",false);
ExprNodeColumnDesc colExprB=new ExprNodeColumnDesc(Integer.class,"col2","T",false);
children.add(colExprA);
exprNodeDesc.setChildren(children);
// GROUP BY descriptor: aggregate abs-sum, group key col2, output _col0.
ArrayList outputColumnNames=new ArrayList();
outputColumnNames.add("_col0");
GroupByDesc desc=new GroupByDesc();
desc.setOutputColumnNames(outputColumnNames);
ArrayList aggDescList=new ArrayList();
aggDescList.add(aggDesc);
desc.setAggregators(aggDescList);
ArrayList grpByKeys=new ArrayList();
grpByKeys.add(colExprB);
desc.setKeys(grpByKeys);
GroupByOperator gbyOp=new GroupByOperator(new CompilationOpContext());
gbyOp.setConf(desc);
Vectorizer v=new Vectorizer();
Assert.assertTrue(v.validateMapWorkOperator(gbyOp,null,false));
VectorGroupByOperator vectorOp=(VectorGroupByOperator)v.vectorizeOperator(gbyOp,vContext,false);
// The sum over a long input must map to VectorUDAFSumLong ...
Assert.assertEquals(VectorUDAFSumLong.class,vectorOp.getAggregators()[0].getClass());
VectorUDAFSumLong udaf=(VectorUDAFSumLong)vectorOp.getAggregators()[0];
// ... and the nested abs() must map to the long-to-long vector expression.
Assert.assertEquals(FuncAbsLongToLong.class,udaf.getInputExpression().getClass());
}
Class: org.apache.hadoop.hive.ql.optimizer.ppr.TestPositivePartitionPrunerCompactExpr APIUtilityVerifier BranchVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Parameterized check of PartitionPruner.compactExpr: when the expected
 * string (fixture field) is null the compacted expression must be null,
 * otherwise its string form must match exactly.
 */
@Test public void testCompactExpr(){
ExprNodeDesc actual=PartitionPruner.compactExpr(expression);
if (expected == null) {
assertNull(actual);
}
else {
assertNotNull("Expected not NULL expression",actual);
assertNotNull("Expected not NULL expression string",actual.getExprString());
assertEquals(expected,actual.getExprString());
}
}
Class: org.apache.hadoop.hive.ql.parse.TestColumnAccess APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Compiles a two-table join and verifies that the column access info and the
 * read entities both report exactly the columns each table contributes.
 */
@Test public void testJoinTable1AndTable2() throws ParseException {
  String query = "select * from t1 join t2 on (t1.id1 = t2.id1)";
  Driver driver = createDriver();
  int rc = driver.compile(query);
  Assert.assertEquals("Checking command success", 0, rc);
  QueryPlan plan = driver.getPlan();
  ColumnAccessInfo columnAccessInfo = plan.getColumnAccessInfo();
  List cols = columnAccessInfo.getTableToColumnAccessMap().get("default@t1");
  Assert.assertNotNull(cols);
  Assert.assertEquals(2, cols.size());
  // Fix: contains() returns a boolean, which autoboxes to a non-null Boolean,
  // so the original assertNotNull(cols.contains(...)) could never fail.
  // assertTrue actually verifies membership.
  Assert.assertTrue(cols.contains("id1"));
  Assert.assertTrue(cols.contains("name1"));
  cols = columnAccessInfo.getTableToColumnAccessMap().get("default@t2");
  Assert.assertNotNull(cols);
  Assert.assertEquals(3, cols.size());
  Assert.assertTrue(cols.contains("id2"));
  Assert.assertTrue(cols.contains("id1"));
  Assert.assertTrue(cols.contains("name1"));
  // The read entities of the plan must agree with the column access info.
  Map> tableColsMap = getColsFromReadEntity(plan.getInputs());
  cols = tableColsMap.get("default@t1");
  Assert.assertNotNull(cols);
  Assert.assertEquals(2, cols.size());
  Assert.assertTrue(cols.contains("id1"));
  Assert.assertTrue(cols.contains("name1"));
  cols = tableColsMap.get("default@t2");
  Assert.assertNotNull(cols);
  Assert.assertEquals(3, cols.size());
  Assert.assertTrue(cols.contains("id2"));
  Assert.assertTrue(cols.contains("id1"));
  Assert.assertTrue(cols.contains("name1"));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Compiles a single-table scan and verifies that both the column access map
 * and the read entities report t1's two columns.
 */
@Test public void testQueryTable1() throws ParseException {
  String query = "select * from t1";
  Driver driver = createDriver();
  int rc = driver.compile(query);
  Assert.assertEquals("Checking command success", 0, rc);
  QueryPlan plan = driver.getPlan();
  ColumnAccessInfo columnAccessInfo = plan.getColumnAccessInfo();
  List cols = columnAccessInfo.getTableToColumnAccessMap().get("default@t1");
  Assert.assertNotNull(cols);
  Assert.assertEquals(2, cols.size());
  // Fix: contains() returns a boolean; assertNotNull on the autoboxed Boolean
  // always passed. assertTrue actually checks membership.
  Assert.assertTrue(cols.contains("id1"));
  Assert.assertTrue(cols.contains("name1"));
  Map> tableColsMap = getColsFromReadEntity(plan.getInputs());
  cols = tableColsMap.get("default@t1");
  Assert.assertNotNull(cols);
  Assert.assertEquals(2, cols.size());
  Assert.assertTrue(cols.contains("id1"));
  Assert.assertTrue(cols.contains("name1"));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Joins a view with a table: the column access map must attribute the view's
 * columns to the underlying table (t1), not to the view (v1) itself.
 */
@Test public void testJoinView1AndTable2() throws ParseException {
  String query = "select * from v1 join t2 on (v1.id1 = t2.id1)";
  Driver driver = createDriver();
  int rc = driver.compile(query);
  Assert.assertEquals("Checking command success", 0, rc);
  QueryPlan plan = driver.getPlan();
  ColumnAccessInfo columnAccessInfo = plan.getColumnAccessInfo();
  // The view itself carries no column entries; accesses resolve to t1.
  List cols = columnAccessInfo.getTableToColumnAccessMap().get("default@v1");
  Assert.assertNull(cols);
  cols = columnAccessInfo.getTableToColumnAccessMap().get("default@t1");
  Assert.assertNotNull(cols);
  Assert.assertEquals(2, cols.size());
  // Fix: contains() returns a boolean; the original assertNotNull on the
  // autoboxed Boolean could never fail. assertTrue verifies membership.
  Assert.assertTrue(cols.contains("id1"));
  Assert.assertTrue(cols.contains("name1"));
  cols = columnAccessInfo.getTableToColumnAccessMap().get("default@t2");
  Assert.assertNotNull(cols);
  Assert.assertEquals(3, cols.size());
  Assert.assertTrue(cols.contains("id2"));
  Assert.assertTrue(cols.contains("id1"));
  Assert.assertTrue(cols.contains("name1"));
  // Read entities must show the same attribution.
  Map> tableColsMap = getColsFromReadEntity(plan.getInputs());
  cols = tableColsMap.get("default@v1");
  Assert.assertNull(cols);
  cols = tableColsMap.get("default@t1");
  Assert.assertNotNull(cols);
  Assert.assertEquals(2, cols.size());
  Assert.assertTrue(cols.contains("id1"));
  Assert.assertTrue(cols.contains("name1"));
  cols = tableColsMap.get("default@t2");
  Assert.assertNotNull(cols);
  Assert.assertEquals(3, cols.size());
  Assert.assertTrue(cols.contains("id2"));
  Assert.assertTrue(cols.contains("id1"));
  Assert.assertTrue(cols.contains("name1"));
}
Class: org.apache.hadoop.hive.ql.parse.TestGenTezWork BooleanVerifier InternalCallVerifier IdentityVerifier EqualityVerifier NullVerifier PublicFieldVerifier HybridVerifier
/**
 * Processing a reduce sink followed by a file sink should create a second
 * (Reduce) work unit whose parent is the existing Map work, with the file
 * sink installed as the reducer.
 */
@Test public void testCreateReduce() throws SemanticException {
  proc.process(rs, null, ctx, (Object[]) null);
  proc.process(fs, null, ctx, (Object[]) null);
  TezWork work = ctx.currentTask.getWork();
  // Fix: assertEquals takes (expected, actual); the original reversed them,
  // which yields misleading failure messages. Pass/fail behavior is unchanged.
  assertEquals(2, work.getAllWork().size());
  BaseWork w = work.getAllWork().get(1);
  assertTrue(w instanceof ReduceWork);
  assertTrue(work.getParents(w).contains(work.getAllWork().get(0)));
  ReduceWork rw = (ReduceWork) w;
  assertNotNull(w.getName());
  assertSame(rw.getReducer(), fs);
  // Once promoted to reducer root, the file sink must have no parents left.
  assertEquals(0, fs.getParentOperators().size());
}
BooleanVerifier InternalCallVerifier IdentityVerifier EqualityVerifier NullVerifier PublicFieldVerifier HybridVerifier
/**
 * Processing a reduce sink alone should create a task containing exactly one
 * Map work unit rooted at the table scan.
 */
@Test public void testCreateMap() throws SemanticException {
  proc.process(rs, null, ctx, (Object[]) null);
  assertNotNull(ctx.currentTask);
  assertTrue(ctx.rootTasks.contains(ctx.currentTask));
  TezWork work = ctx.currentTask.getWork();
  // Fix: assertEquals takes (expected, actual); the original reversed them,
  // producing misleading failure messages. Pass/fail behavior is unchanged.
  assertEquals(1, work.getAllWork().size());
  BaseWork w = work.getAllWork().get(0);
  assertTrue(w instanceof MapWork);
  MapWork mw = (MapWork) w;
  assertNotNull(w.getName());
  // The table scan is the single alias mapped into the Map work.
  assertSame(mw.getAliasToWork().entrySet().iterator().next().getValue(), ts);
  assertSame(ctx.preceedingWork, mw);
  assertSame(ctx.currentRootOperator, fs);
}
Class: org.apache.hadoop.hive.ql.parse.TestHiveDecimalParse APIUtilityVerifier EqualityVerifier
@Test public void testDecimalType2() throws ParseException {
  // An explicit precision and scale must be preserved verbatim in the column type.
  final String columnType = getColumnType("create table dec (d decimal(9,7))");
  Assert.assertEquals("decimal(9,7)", columnType);
}
APIUtilityVerifier EqualityVerifier
@Test public void testDecimalType1() throws ParseException {
  // When only precision is given, the scale defaults to 0.
  final String columnType = getColumnType("create table dec (d decimal(5))");
  Assert.assertEquals("decimal(5,0)", columnType);
}
APIUtilityVerifier EqualityVerifier
@Test public void testDecimalType() throws ParseException {
  // A bare "decimal" resolves to the default precision/scale of (10,0).
  final String columnType = getColumnType("create table dec (d decimal)");
  Assert.assertEquals("decimal(10,0)", columnType);
}
Class: org.apache.hadoop.hive.ql.parse.TestIUD EqualityVerifier
@Test public void testDeleteNoWhere() throws ParseException {
  // A DELETE without a WHERE clause parses to just the table-name node.
  final ASTNode tree = parse("DELETE FROM src");
  Assert.assertEquals("AST doesn't match",
      "(tok_delete_from (tok_tabname src))",
      tree.toStringTree());
}
EqualityVerifier
@Test public void testUpdateNoWhereSingleSet() throws ParseException {
  // UPDATE with a single SET assignment and no WHERE clause.
  final ASTNode tree = parse("UPDATE src set key = 3");
  Assert.assertEquals("AST doesn't match",
      "(tok_update_table (tok_tabname src) (tok_set_columns_clause (= (tok_table_or_col key) 3)))",
      tree.toStringTree());
}
EqualityVerifier
/**
 * INSERT INTO ... VALUES must parse to a query over an anonymous virtual
 * VALUES table, and the optional TABLE keyword must not change the tree.
 */
@Test public void testInsertIntoTableFromAnonymousTable() throws ParseException {
ASTNode ast=parse("insert into table page_view values(-1,2),(3,+4)");
Assert.assertEquals("AST doesn't match","(tok_query " + "(tok_from " + "(tok_virtual_table "+ "(tok_virtual_tabref tok_anonymous) "+ "(tok_values_table (tok_value_row (- 1) 2) (tok_value_row 3 (+ 4))))) "+ "(tok_insert (tok_insert_into (tok_tab (tok_tabname page_view))) "+ "(tok_select (tok_selexpr tok_allcolref))))",ast.toStringTree());
// The same statement without the TABLE keyword parses to an identical tree.
ast=parse("insert into page_view values(-1,2),(3,+4)");
Assert.assertEquals("AST doesn't match","(tok_query " + "(tok_from " + "(tok_virtual_table "+ "(tok_virtual_tabref tok_anonymous) "+ "(tok_values_table (tok_value_row (- 1) 2) (tok_value_row 3 (+ 4))))) "+ "(tok_insert (tok_insert_into (tok_tab (tok_tabname page_view))) "+ "(tok_select (tok_selexpr tok_allcolref))))",ast.toStringTree());
}
EqualityVerifier
/**
 * Same as testInsertIntoTableAsSelectFromNamedVirtTable, but with an explicit
 * column list on the target table, which must appear as a tok_tabcolname node.
 * @throws ParseException if the statement fails to parse
 */
@Test public void testInsertIntoTableAsSelectFromNamedVirtTableNamedCol() throws ParseException {
ASTNode ast=parse("insert into page_view(c1,c2) select a,b as c from (values (1,2),(3,4)) as vc(a,b) where b = 9");
Assert.assertEquals("AST doesn't match","(tok_query " + "(tok_from " + "(tok_virtual_table "+ "(tok_virtual_tabref (tok_tabname vc) (tok_col_name a b)) "+ "(tok_values_table (tok_value_row 1 2) (tok_value_row 3 4)))) "+ "(tok_insert (tok_insert_into (tok_tab (tok_tabname page_view)) (tok_tabcolname c1 c2)) "+ "(tok_select "+ "(tok_selexpr (tok_table_or_col a)) "+ "(tok_selexpr (tok_table_or_col b) c)) "+ "(tok_where (= (tok_table_or_col b) 9))))",ast.toStringTree());
}
EqualityVerifier
/**
 * Selecting aliased columns from a two-row named VALUES table: the aliases
 * (c, d) must appear on the corresponding tok_selexpr nodes.
 */
@Test public void testSelectStarFromVirtTable2RowNamedProjections() throws ParseException {
ASTNode ast=parse("select a as c, b as d from (values (1,2),(3,4)) as vc(a,b)");
Assert.assertEquals("AST doesn't match","(tok_query " + "(tok_from " + "(tok_virtual_table "+ "(tok_virtual_tabref (tok_tabname vc) (tok_col_name a b)) "+ "(tok_values_table (tok_value_row 1 2) (tok_value_row 3 4)))) "+ "(tok_insert (tok_destination (tok_dir tok_tmp_file)) "+ "(tok_select (tok_selexpr (tok_table_or_col a) c) (tok_selexpr (tok_table_or_col b) d))))",ast.toStringTree());
}
EqualityVerifier
@Test public void testUpdateNoWhereMultiSet() throws ParseException {
  // UPDATE with two SET assignments and no WHERE clause: both assignments
  // land under a single tok_set_columns_clause node.
  final ASTNode tree = parse("UPDATE src set key = 3, value = 8");
  Assert.assertEquals("AST doesn't match",
      "(tok_update_table (tok_tabname src) (tok_set_columns_clause (= (tok_table_or_col key) 3) (= (tok_table_or_col value) 8)))",
      tree.toStringTree());
}
EqualityVerifier
/**
 * SELECT * from a single-row named VALUES table parses to a virtual table
 * with one tok_value_row feeding a tmp-file destination.
 */
@Test public void testSelectStarFromVirtTable1Row() throws ParseException {
ASTNode ast=parse("select * from (values (3,4)) as vc(a,b)");
Assert.assertEquals("AST doesn't match","(tok_query " + "(tok_from " + "(tok_virtual_table "+ "(tok_virtual_tabref (tok_tabname vc) (tok_col_name a b)) "+ "(tok_values_table (tok_value_row 3 4)))) "+ "(tok_insert (tok_destination (tok_dir tok_tmp_file)) (tok_select (tok_selexpr tok_allcolref))))",ast.toStringTree());
}
EqualityVerifier
/**
 * Standard INSERT INTO TABLE ... SELECT: the source table reference, the
 * projections, and the IS NULL predicate must all appear in the tree.
 */
@Test public void testStandardInsertIntoTable() throws ParseException {
ASTNode ast=parse("INSERT into TABLE page_view SELECT pvs.viewTime, pvs.userid from page_view_stg pvs where pvs.userid is null");
Assert.assertEquals("AST doesn't match","(tok_query " + "(tok_from " + "(tok_tabref (tok_tabname page_view_stg) pvs)) "+ "(tok_insert (tok_insert_into (tok_tab (tok_tabname page_view))) "+ "(tok_select "+ "(tok_selexpr (. (tok_table_or_col pvs) viewtime)) "+ "(tok_selexpr (. (tok_table_or_col pvs) userid))) "+ "(tok_where (tok_function tok_isnull (. (tok_table_or_col pvs) userid)))))",ast.toStringTree());
}
BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Using the reserved word `values` as a table name must be rejected by the
 * parser with a specific error message.
 */
@Test public void testSelectStarFromAnonymousVirtTable1Row() throws ParseException {
  try {
    parse("select * from `values` (3,4)");
    // Fix: Assert.fail is the idiomatic way to flag a missing exception;
    // the original used the obscure assertFalse("...", true).
    Assert.fail("Expected ParseException");
  }
  catch ( ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:23 missing EOF at '(' near 'values'",ex.getMessage());
  }
}
EqualityVerifier
/**
 * UPDATE whose SET values are arithmetic expressions, a CAST, and a
 * self-referencing decrement; checks operator precedence in the parsed tree.
 */
@Test public void testUpdateWithWhereSingleSetExpr() throws ParseException {
ASTNode ast=parse("UPDATE src SET key = -3+(5*9)%8, val = cast(6.1 + c as INT), d = d - 1 WHERE value IS NULL");
Assert.assertEquals("AST doesn't match","(tok_update_table (tok_tabname src) " + "(tok_set_columns_clause " + "(= (tok_table_or_col key) (+ (- 3) (% (* 5 9) 8))) "+ "(= (tok_table_or_col val) (tok_function tok_int (+ 6.1 (tok_table_or_col c)))) "+ "(= (tok_table_or_col d) (- (tok_table_or_col d) 1))) "+ "(tok_where (tok_function tok_isnull (tok_table_or_col value))))",ast.toStringTree());
}
EqualityVerifier
@Test public void testUpdateWithWhereMultiSet() throws ParseException {
  // UPDATE with two SET assignments plus a WHERE clause; column names are
  // lower-cased in the tree (VALUE -> value).
  final ASTNode tree = parse("UPDATE src SET key = 3, value = 8 WHERE VALUE = 1230997");
  Assert.assertEquals("AST doesn't match",
      "(tok_update_table (tok_tabname src) (tok_set_columns_clause (= (tok_table_or_col key) 3) (= (tok_table_or_col value) 8)) (tok_where (= (tok_table_or_col value) 1230997)))",
      tree.toStringTree());
}
EqualityVerifier
@Test public void testDeleteWithWhere() throws ParseException {
  // DELETE with a compound WHERE clause: IS NOT NULL becomes tok_isnotnull
  // and the qualified column reference becomes a dot node.
  final ASTNode tree = parse("DELETE FROM src WHERE key IS NOT NULL AND src.value < 0");
  Assert.assertEquals("AST doesn't match",
      "(tok_delete_from (tok_tabname src) (tok_where (and (tok_function tok_isnotnull (tok_table_or_col key)) (< (. (tok_table_or_col src) value) 0))))",
      tree.toStringTree());
}
EqualityVerifier
/**
 * Same as testInsertIntoTableFromAnonymousTable1Row, but with an explicit
 * column list on the target table, which must appear as a tok_tabcolname node.
 * @throws ParseException if the statement fails to parse
 */
@Test public void testInsertIntoTableFromAnonymousTable1RowNamedCol() throws ParseException {
ASTNode ast=parse("insert into page_view(a,b) values(1,2)");
Assert.assertEquals("AST doesn't match","(tok_query " + "(tok_from " + "(tok_virtual_table "+ "(tok_virtual_tabref tok_anonymous) "+ "(tok_values_table (tok_value_row 1 2))"+ ")"+ ") "+ "(tok_insert "+ "(tok_insert_into "+ "(tok_tab (tok_tabname page_view)) "+ "(tok_tabcolname a b)"+ ") "+ "(tok_select "+ "(tok_selexpr tok_allcolref)"+ ")"+ ")"+ ")",ast.toStringTree());
}
EqualityVerifier
@Test public void testUpdateWithWhereSingleSet() throws ParseException {
  // UPDATE with one SET assignment and an IS NULL predicate in the WHERE clause.
  final ASTNode tree = parse("UPDATE src SET key = 3 WHERE value IS NULL");
  Assert.assertEquals("AST doesn't match",
      "(tok_update_table (tok_tabname src) (tok_set_columns_clause (= (tok_table_or_col key) 3)) (tok_where (tok_function tok_isnull (tok_table_or_col value))))",
      tree.toStringTree());
}
EqualityVerifier
/**
 * INSERT ... VALUES with a single row and no column list parses to an
 * anonymous virtual VALUES table feeding the insert.
 */
@Test public void testInsertIntoTableFromAnonymousTable1Row() throws ParseException {
ASTNode ast=parse("insert into page_view values(1,2)");
Assert.assertEquals("AST doesn't match","(tok_query " + "(tok_from " + "(tok_virtual_table "+ "(tok_virtual_tabref tok_anonymous) "+ "(tok_values_table (tok_value_row 1 2)))) "+ "(tok_insert (tok_insert_into (tok_tab (tok_tabname page_view))) "+ "(tok_select (tok_selexpr tok_allcolref))))",ast.toStringTree());
}
EqualityVerifier
/**
 * Multi-insert syntax (FROM ... INSERT ... INSERT ...) produces one tok_insert
 * node per target table under a single query node.
 */
@Test public void testMultiInsert() throws ParseException {
ASTNode ast=parse("from S insert into T1 select a, b insert into T2 select c, d");
Assert.assertEquals("AST doesn't match","(tok_query (tok_from (tok_tabref (tok_tabname s))) " + "(tok_insert (tok_insert_into (tok_tab (tok_tabname t1))) (tok_select (tok_selexpr (tok_table_or_col a)) (tok_selexpr (tok_table_or_col b)))) " + "(tok_insert (tok_insert_into (tok_tab (tok_tabname t2))) (tok_select (tok_selexpr (tok_table_or_col c)) (tok_selexpr (tok_table_or_col d)))))",ast.toStringTree());
}
EqualityVerifier
/**
 * SELECT * from a two-row named VALUES table: each row becomes a
 * tok_value_row under the virtual table node.
 */
@Test public void testSelectStarFromVirtTable2Row() throws ParseException {
ASTNode ast=parse("select * from (values (1,2),(3,4)) as vc(a,b)");
Assert.assertEquals("AST doesn't match","(tok_query " + "(tok_from " + "(tok_virtual_table "+ "(tok_virtual_tabref (tok_tabname vc) (tok_col_name a b)) "+ "(tok_values_table (tok_value_row 1 2) (tok_value_row 3 4)))) "+ "(tok_insert (tok_destination (tok_dir tok_tmp_file)) (tok_select (tok_selexpr tok_allcolref))))",ast.toStringTree());
}
EqualityVerifier
/**
 * INSERT ... SELECT from a named VALUES table with a WHERE clause: the
 * virtual table, the projections (including alias c), and the predicate must
 * all be present in the tree.
 */
@Test public void testInsertIntoTableAsSelectFromNamedVirtTable() throws ParseException {
ASTNode ast=parse("insert into page_view select a,b as c from (values (1,2),(3,4)) as vc(a,b) where b = 9");
Assert.assertEquals("AST doesn't match","(tok_query " + "(tok_from " + "(tok_virtual_table "+ "(tok_virtual_tabref (tok_tabname vc) (tok_col_name a b)) "+ "(tok_values_table (tok_value_row 1 2) (tok_value_row 3 4)))) "+ "(tok_insert (tok_insert_into (tok_tab (tok_tabname page_view))) "+ "(tok_select "+ "(tok_selexpr (tok_table_or_col a)) "+ "(tok_selexpr (tok_table_or_col b) c)) "+ "(tok_where (= (tok_table_or_col b) 9))))",ast.toStringTree());
}
Class: org.apache.hadoop.hive.ql.parse.TestParseDriver IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Parses a full SELECT ... WHERE ... GROUP BY ... HAVING statement and walks
 * the resulting AST node by node, cross-checking sub-trees against
 * stand-alone parses of the same clause text via assertTree.
 */
@Test public void testParse() throws Exception {
String selectStr="select field1, field2, sum(field3+field4)";
String whereStr="field5=1 and field6 in ('a', 'b')";
String havingStr="sum(field7) > 11";
ASTNode tree=parseDriver.parse(selectStr + " from table1 where " + whereStr+ " group by field1, field2 having "+ havingStr);
// Root: a synthetic node holding the query tree plus an EOF marker.
assertEquals(tree.getType(),0);
assertEquals(tree.getChildCount(),2);
ASTNode queryTree=(ASTNode)tree.getChild(0);
assertEquals(tree.getChild(1).getType(),HiveParser.EOF);
assertEquals(queryTree.getChildCount(),2);
// The query splits into a FROM sub-tree and an INSERT sub-tree.
ASTNode fromAST=(ASTNode)queryTree.getChild(0);
ASTNode insertAST=(ASTNode)queryTree.getChild(1);
// FROM: a single table reference resolving to identifier "table1".
assertEquals(fromAST.getType(),HiveParser.TOK_FROM);
assertEquals(fromAST.getChildCount(),1);
assertEquals(fromAST.getChild(0).getType(),HiveParser.TOK_TABREF);
assertEquals(fromAST.getChild(0).getChildCount(),1);
assertEquals(fromAST.getChild(0).getChild(0).getType(),HiveParser.TOK_TABNAME);
assertEquals(fromAST.getChild(0).getChild(0).getChildCount(),1);
assertEquals(fromAST.getChild(0).getChild(0).getChild(0).getType(),HiveParser.Identifier);
assertEquals(fromAST.getChild(0).getChild(0).getChild(0).getText(),"table1");
// INSERT: destination, select, where, group-by, having — in that order.
assertEquals(insertAST.getChildCount(),5);
assertEquals(insertAST.getChild(0).getType(),HiveParser.TOK_DESTINATION);
assertTree((ASTNode)insertAST.getChild(1),parseDriver.parseSelect(selectStr,null));
assertEquals(insertAST.getChild(2).getType(),HiveParser.TOK_WHERE);
assertTree((ASTNode)insertAST.getChild(2).getChild(0),parseDriver.parseExpression(whereStr));
assertEquals(insertAST.getChild(3).getType(),HiveParser.TOK_GROUPBY);
assertEquals(insertAST.getChild(3).getChildCount(),2);
// Group-by keys are plain column references field1 and field2.
for (int i=0; i < 2; i++) {
assertEquals(insertAST.getChild(3).getChild(i).getType(),HiveParser.TOK_TABLE_OR_COL);
assertEquals(insertAST.getChild(3).getChild(i).getChild(0).getType(),HiveParser.Identifier);
assertEquals(insertAST.getChild(3).getChild(i).getChild(0).getText(),"field" + (i + 1));
}
assertEquals(insertAST.getChild(4).getType(),HiveParser.TOK_HAVING);
assertEquals(insertAST.getChild(4).getChildCount(),1);
assertTree((ASTNode)insertAST.getChild(4).getChild(0),parseDriver.parseExpression(havingStr));
}
IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Exercises parseExpression on a plus expression, an aggregate call, and a
 * CASE expression, reusing the smaller trees (via assertTree) to validate
 * sub-trees of the larger ones.
 */
@Test public void testParseExpression() throws Exception {
// "a + b": PLUS node with two column-reference children.
ASTNode plusNode=parseDriver.parseExpression("field3 + field4");
assertEquals(plusNode.getType(),HiveParser.PLUS);
assertEquals(plusNode.getChildCount(),2);
for (int i=0; i < 2; i++) {
assertEquals(plusNode.getChild(i).getType(),HiveParser.TOK_TABLE_OR_COL);
assertEquals(plusNode.getChild(i).getChildCount(),1);
assertEquals(plusNode.getChild(i).getChild(0).getType(),HiveParser.Identifier);
assertEquals(plusNode.getChild(i).getChild(0).getText(),"field" + (i + 3));
}
// "sum(a + b)": function node whose argument is the plus tree above.
ASTNode sumNode=parseDriver.parseExpression("sum(field3 + field4)");
assertEquals(sumNode.getType(),HiveParser.TOK_FUNCTION);
assertEquals(sumNode.getChildCount(),2);
assertEquals(sumNode.getChild(0).getType(),HiveParser.Identifier);
assertEquals(sumNode.getChild(0).getText(),"sum");
assertTree((ASTNode)sumNode.getChild(1),plusNode);
// CASE: children alternate WHEN/condition/result, ending with the ELSE result.
ASTNode tree=parseDriver.parseExpression("case when field1 = 1 then sum(field3 + field4) when field1 != 2 then " + "sum(field3-field4) else sum(field3 * field4) end");
assertEquals(tree.getChildCount(),6);
assertEquals(tree.getChild(0).getType(),HiveParser.KW_WHEN);
assertEquals(tree.getChild(1).getType(),HiveParser.EQUAL);
assertTree((ASTNode)tree.getChild(2),sumNode);
assertEquals(tree.getChild(3).getType(),HiveParser.NOTEQUAL);
assertTree((ASTNode)tree.getChild(4),parseDriver.parseExpression("sum(field3-field4)"));
assertTree((ASTNode)tree.getChild(5),parseDriver.parseExpression("sum(field3*field4)"));
}
IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * parseSelect should yield a TOK_SELECT node with one TOK_SELEXPR child per
 * projected expression, each matching a stand-alone parse of the same text.
 */
@Test public void testParseSelect() throws Exception {
  ASTNode tree = parseDriver.parseSelect("select field1, field2, sum(field3+field4)", null);
  // Fix: assertEquals takes (expected, actual); the original reversed them,
  // which produces misleading failure messages. Pass/fail behavior unchanged.
  assertEquals(HiveParser.TOK_SELECT, tree.getType());
  assertEquals(3, tree.getChildCount());
  for (int i = 0; i < 3; i++) {
    assertEquals(HiveParser.TOK_SELEXPR, ((ASTNode) tree.getChild(i)).getType());
  }
  assertTree((ASTNode) tree.getChild(0).getChild(0), parseDriver.parseExpression("field1"));
  assertTree((ASTNode) tree.getChild(1).getChild(0), parseDriver.parseExpression("field2"));
  assertTree((ASTNode) tree.getChild(2).getChild(0), parseDriver.parseExpression("sum(field3+field4)"));
}
Class: org.apache.hadoop.hive.ql.parse.TestQBCompact InternalCallVerifier EqualityVerifier
/**
 * Compaction on a non-partitioned table: the desc carries the compaction
 * type and the fully-qualified table name, with no partition spec required.
 */
@Test public void testNonPartitionedTable() throws Exception {
  // Fix: removed the unused local 'sawException', a leftover from a copied
  // exception-expecting test.
  AlterTableSimpleDesc desc = parseAndAnalyze("alter table foo compact 'major'");
  Assert.assertEquals("major", desc.getCompactionType());
  Assert.assertEquals("default.foo", desc.getTableName());
}
InternalCallVerifier EqualityVerifier
@Test public void testMinor() throws Exception {
  // A minor compaction on a single partition carries the type, the qualified
  // table name, and exactly the one partition key/value given.
  final AlterTableSimpleDesc compactionDesc =
      parseAndAnalyze("alter table foo partition(ds = 'today') compact 'minor'");
  Assert.assertEquals("minor", compactionDesc.getCompactionType());
  Assert.assertEquals("default.foo", compactionDesc.getTableName());
  final HashMap partitionSpec = compactionDesc.getPartSpec();
  Assert.assertEquals(1, partitionSpec.size());
  Assert.assertEquals("today", partitionSpec.get("ds"));
}
InternalCallVerifier EqualityVerifier
@Test public void testMajor() throws Exception {
  // A major compaction on a single partition carries the type, the qualified
  // table name, and exactly the one partition key/value given.
  final AlterTableSimpleDesc compactionDesc =
      parseAndAnalyze("alter table foo partition(ds = 'today') compact 'major'");
  Assert.assertEquals("major", compactionDesc.getCompactionType());
  Assert.assertEquals("default.foo", compactionDesc.getTableName());
  final HashMap partitionSpec = compactionDesc.getPartSpec();
  Assert.assertEquals(1, partitionSpec.size());
  Assert.assertEquals("today", partitionSpec.get("ds"));
}
BooleanVerifier EqualityVerifier HybridVerifier
@Test public void testBogusLevel() throws Exception {
  // An unknown compaction type must be rejected during semantic analysis
  // with the INVALID_COMPACTION_TYPE error message.
  boolean caughtExpectedException = false;
  try {
    parseAndAnalyze("alter table foo partition(ds = 'today') compact 'bogus'");
  }
  catch ( SemanticException e) {
    Assert.assertEquals(ErrorMsg.INVALID_COMPACTION_TYPE.getMsg(), e.getMessage());
    caughtExpectedException = true;
  }
  Assert.assertTrue(caughtExpectedException);
}
Class: org.apache.hadoop.hive.ql.parse.TestQBJoinTreeApplyPredicate APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void test3WayJoin() throws SemanticException {
  // a-b-c chain: each equality predicate must be recorded on the join tree
  // that owns the pair of aliases it references.
  final QBJoinTree innerTree = createJoinTree(JoinType.INNER, "a", null, "b");
  final QBJoinTree outerTree = createJoinTree(JoinType.INNER, "b", innerTree, "c");
  final ASTNode condAB = applyEqPredicate(outerTree, "a", "x", "b", "y");
  final ASTNode condBC = applyEqPredicate(outerTree, "b", "y", "c", "z");
  // a=b lands on the inner tree, b=c on the outer one.
  Assert.assertEquals(innerTree.getExpressions().get(0).get(0), condAB.getChild(0));
  Assert.assertEquals(innerTree.getExpressions().get(1).get(0), condAB.getChild(1));
  Assert.assertEquals(outerTree.getExpressions().get(0).get(0), condBC.getChild(0));
  Assert.assertEquals(outerTree.getExpressions().get(1).get(0), condBC.getChild(1));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void test4WayJoin() throws SemanticException {
  // a-b-c-d chain: predicates route to the join tree owning their aliases,
  // and a second predicate on the same tree appends at index 1.
  final QBJoinTree treeAB = createJoinTree(JoinType.INNER, "a", null, "b");
  final QBJoinTree treeBC = createJoinTree(JoinType.INNER, "b", treeAB, "c");
  final QBJoinTree treeCD = createJoinTree(JoinType.INNER, "c", treeBC, "d");
  final ASTNode condAB = applyEqPredicate(treeCD, "a", "x", "b", "y");
  final ASTNode condBC = applyEqPredicate(treeCD, "b", "y", "c", "z");
  final ASTNode condAC = applyEqPredicate(treeCD, "a", "x", "c", "z");
  Assert.assertEquals(treeAB.getExpressions().get(0).get(0), condAB.getChild(0));
  Assert.assertEquals(treeAB.getExpressions().get(1).get(0), condAB.getChild(1));
  Assert.assertEquals(treeBC.getExpressions().get(0).get(0), condBC.getChild(0));
  Assert.assertEquals(treeBC.getExpressions().get(1).get(0), condBC.getChild(1));
  // a=c also resolves to the b-c tree, stored after b=c.
  Assert.assertEquals(treeBC.getExpressions().get(0).get(1), condAC.getChild(0));
  Assert.assertEquals(treeBC.getExpressions().get(1).get(1), condAC.getChild(1));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void testSimpleCondn() throws SemanticException {
  // Single inner join: the equality's left/right operands become the
  // tree's left/right join expressions.
  final QBJoinTree joinTree = createJoinTree(JoinType.INNER, "a", null, "b");
  final ASTNode cond = applyEqPredicate(joinTree, "a", "x", "b", "y");
  Assert.assertEquals(joinTree.getExpressions().get(0).get(0), cond.getChild(0));
  Assert.assertEquals(joinTree.getExpressions().get(1).get(0), cond.getChild(1));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void test4WayJoinSwitched() throws SemanticException {
  // Same as test4WayJoin, but with the operand order reversed in the
  // predicates: the join trees must still store them left/right normalized,
  // so the recorded expressions map to the opposite children of the condition.
  final QBJoinTree treeAB = createJoinTree(JoinType.INNER, "a", null, "b");
  final QBJoinTree treeBC = createJoinTree(JoinType.INNER, "b", treeAB, "c");
  final QBJoinTree treeCD = createJoinTree(JoinType.INNER, "c", treeBC, "d");
  final ASTNode condBA = applyEqPredicate(treeCD, "b", "y", "a", "x");
  final ASTNode condBC = applyEqPredicate(treeCD, "b", "y", "c", "z");
  final ASTNode condCA = applyEqPredicate(treeCD, "c", "z", "a", "x");
  Assert.assertEquals(treeAB.getExpressions().get(0).get(0), condBA.getChild(1));
  Assert.assertEquals(treeAB.getExpressions().get(1).get(0), condBA.getChild(0));
  Assert.assertEquals(treeBC.getExpressions().get(0).get(0), condBC.getChild(0));
  Assert.assertEquals(treeBC.getExpressions().get(1).get(0), condBC.getChild(1));
  Assert.assertEquals(treeBC.getExpressions().get(0).get(1), condCA.getChild(1));
  Assert.assertEquals(treeBC.getExpressions().get(1).get(1), condCA.getChild(0));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void test3WayJoinSwitched() throws SemanticException {
  // Like test3WayJoin but with b=a written operand-reversed: the inner tree
  // still stores the pair normalized, so its expressions map to the swapped
  // children of the condition node.
  final QBJoinTree innerTree = createJoinTree(JoinType.INNER, "a", null, "b");
  final QBJoinTree outerTree = createJoinTree(JoinType.INNER, "b", innerTree, "c");
  final ASTNode condBA = applyEqPredicate(outerTree, "b", "y", "a", "x");
  final ASTNode condBC = applyEqPredicate(outerTree, "b", "y", "c", "z");
  Assert.assertEquals(innerTree.getExpressions().get(0).get(0), condBA.getChild(1));
  Assert.assertEquals(innerTree.getExpressions().get(1).get(0), condBA.getChild(0));
  Assert.assertEquals(outerTree.getExpressions().get(0).get(0), condBC.getChild(0));
  Assert.assertEquals(outerTree.getExpressions().get(1).get(0), condBC.getChild(1));
}
Class: org.apache.hadoop.hive.ql.parse.TestQBSubQuery APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void testRewriteOuterQueryWhere() throws Exception {
  // When the subquery is the entire WHERE predicate, removing it leaves the
  // trivially-true placeholder (= 1 1).
  final ASTNode queryAst = parse(IN_QUERY);
  final ASTNode whereClause = where(queryAst);
  final List subQueries = SubQueryUtils.findSubQueries((ASTNode) whereClause.getChild(0));
  final ASTNode subQuery = subQueries.get(0);
  final ASTNode rewritten =
      SubQueryUtils.rewriteParentQueryWhere((ASTNode) whereClause.getChild(0), subQuery);
  Assert.assertEquals("(= 1 1)", rewritten.toStringTree());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * findSubQueries must locate the single IN-subquery inside the WHERE clause
 * and return its full tok_subquery_expr tree (operator, inner query, and the
 * outer-query correlation expression).
 */
@Test public void testExtractSubQueries() throws Exception {
ASTNode ast=parse(IN_QUERY);
ASTNode where=where(ast);
List sqs=SubQueryUtils.findSubQueries((ASTNode)where.getChild(0));
Assert.assertEquals(1,sqs.size());
ASTNode sq=sqs.get(0);
Assert.assertEquals("(tok_subquery_expr (tok_subquery_op in) (tok_query (tok_from (tok_tabref (tok_tabname src) s1)) (tok_insert (tok_destination (tok_dir tok_tmp_file)) (tok_select (tok_selexpr (tok_table_or_col key))) (tok_where (and (> (. (tok_table_or_col s1) key) '9') (> (. (tok_table_or_col s1) value) '9'))))) (. (tok_table_or_col src) key))",sq.toStringTree());
}
EqualityVerifier
@Test public void testExtractConjuncts() throws Exception {
  // extractConjuncts must split the subquery's AND-ed WHERE predicate into
  // its two comparison conjuncts, preserving their order.
  final ASTNode queryAst = parse(IN_QUERY);
  final ASTNode outerWhere = where(queryAst);
  final List subQueries = SubQueryUtils.findSubQueries((ASTNode) outerWhere.getChild(0));
  final ASTNode subQuery = subQueries.get(0);
  final ASTNode subQueryWhere = where((ASTNode) subQuery.getChild(1));
  final List conjuncts = new ArrayList();
  SubQueryUtils.extractConjuncts((ASTNode) subQueryWhere.getChild(0), conjuncts);
  Assert.assertEquals(2, conjuncts.size());
  Assert.assertEquals("(> (. (tok_table_or_col s1) key) '9')", conjuncts.get(0).toStringTree());
  Assert.assertEquals("(> (. (tok_table_or_col s1) value) '9')", conjuncts.get(1).toStringTree());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void testCheckAggOrWindowing() throws Exception {
  // checkAggOrWindowing returns a distinct classification code for each of
  // QUERY3's three select expressions; verify them in order.
  final ASTNode selectClause = select(parse(QUERY3));
  final int[] expectedCodes = {0, 1, 2};
  for (int i = 0; i < expectedCodes.length; i++) {
    Assert.assertEquals(expectedCodes[i],
        SubQueryUtils.checkAggOrWindowing((ASTNode) selectClause.getChild(i)));
  }
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void testRewriteOuterQueryWhere2() throws Exception {
  // When the WHERE clause has a conjunct besides the subquery, removing the
  // subquery must leave that remaining predicate intact.
  final ASTNode queryAst = parse(IN_QUERY2);
  final ASTNode whereClause = where(queryAst);
  final List subQueries = SubQueryUtils.findSubQueries((ASTNode) whereClause.getChild(0));
  final ASTNode subQuery = subQueries.get(0);
  final ASTNode rewritten =
      SubQueryUtils.rewriteParentQueryWhere((ASTNode) whereClause.getChild(0), subQuery);
  Assert.assertEquals("(> (tok_table_or_col value) '9')", rewritten.toStringTree());
}
Class: org.apache.hadoop.hive.ql.parse.TestSQL11ReservedKeyWordsNegative BooleanVerifier EqualityVerifier HybridVerifier
/** REGEXP is reserved in SQL11 mode; using it as a table name must fail to parse. */
@Test public void testSQL11ReservedKeyWords_REGEXP(){
  try {
    parse("CREATE TABLE REGEXP (col STRING)");
    // Fix: Assert.fail is the idiom for a missing exception; the original
    // used the obscure assertFalse("...", true).
    Assert.fail("Expected ParseException");
  }
  catch ( ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'REGEXP'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
/** FOR is reserved in SQL11 mode; using it as a table name must fail to parse. */
@Test public void testSQL11ReservedKeyWords_FOR(){
  try {
    parse("CREATE TABLE FOR (col STRING)");
    // Fix: use Assert.fail instead of assertFalse("...", true).
    Assert.fail("Expected ParseException");
  }
  catch ( ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'FOR'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
/** RIGHT is reserved in SQL11 mode; using it as a table name must fail to parse. */
@Test public void testSQL11ReservedKeyWords_RIGHT(){
  try {
    parse("CREATE TABLE RIGHT (col STRING)");
    // Fix: use Assert.fail instead of assertFalse("...", true).
    Assert.fail("Expected ParseException");
  }
  catch ( ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'RIGHT'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
/** RANGE is reserved in SQL11 mode; using it as a table name must fail to parse. */
@Test public void testSQL11ReservedKeyWords_RANGE(){
  try {
    parse("CREATE TABLE RANGE (col STRING)");
    // Fix: use Assert.fail instead of assertFalse("...", true).
    Assert.fail("Expected ParseException");
  }
  catch ( ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'RANGE'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
/** CREATE is reserved in SQL11 mode; using it as a table name must fail to parse. */
@Test public void testSQL11ReservedKeyWords_CREATE(){
  try {
    parse("CREATE TABLE CREATE (col STRING)");
    // Fix: use Assert.fail instead of assertFalse("...", true).
    Assert.fail("Expected ParseException");
  }
  catch ( ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'CREATE'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
/** READS is reserved in SQL11 mode; using it as a table name must fail to parse. */
@Test public void testSQL11ReservedKeyWords_READS(){
  try {
    parse("CREATE TABLE READS (col STRING)");
    // Fix: use Assert.fail instead of assertFalse("...", true).
    Assert.fail("Expected ParseException");
  }
  catch ( ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'READS'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
/** LATERAL is reserved in SQL11 mode; using it as a table name must fail to parse. */
@Test public void testSQL11ReservedKeyWords_LATERAL(){
  try {
    parse("CREATE TABLE LATERAL (col STRING)");
    // Fix: use Assert.fail instead of assertFalse("...", true).
    Assert.fail("Expected ParseException");
  }
  catch ( ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'LATERAL'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
/** FULL is reserved in SQL11 mode; using it as a table name must fail to parse. */
@Test public void testSQL11ReservedKeyWords_FULL(){
  try {
    parse("CREATE TABLE FULL (col STRING)");
    // Fix: use Assert.fail instead of assertFalse("...", true).
    Assert.fail("Expected ParseException");
  }
  catch ( ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'FULL'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
/** PROCEDURE is reserved in SQL11 mode; using it as a table name must fail to parse. */
@Test public void testSQL11ReservedKeyWords_PROCEDURE(){
  try {
    parse("CREATE TABLE PROCEDURE (col STRING)");
    // Fix: use Assert.fail instead of assertFalse("...", true).
    Assert.fail("Expected ParseException");
  }
  catch ( ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'PROCEDURE'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
/** EXTERNAL is reserved in SQL11 mode; using it as a table name must fail to parse. */
@Test public void testSQL11ReservedKeyWords_EXTERNAL(){
  try {
    parse("CREATE TABLE EXTERNAL (col STRING)");
    // Fix: use Assert.fail instead of assertFalse("...", true).
    Assert.fail("Expected ParseException");
  }
  catch ( ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'EXTERNAL'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
/** PERCENT is reserved in SQL11 mode; using it as a table name must fail to parse. */
@Test public void testSQL11ReservedKeyWords_PERCENT(){
  try {
    parse("CREATE TABLE PERCENT (col STRING)");
    // Fix: use Assert.fail instead of assertFalse("...", true).
    Assert.fail("Expected ParseException");
  }
  catch ( ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'PERCENT'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword BY is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_BY(){
  try {
    parse("CREATE TABLE BY (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'BY'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword SMALLINT is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_SMALLINT(){
  try {
    parse("CREATE TABLE SMALLINT (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'SMALLINT'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword INTERSECT is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_INTERSECT(){
  try {
    parse("CREATE TABLE INTERSECT (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'INTERSECT'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword ALTER is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_ALTER(){
  try {
    parse("CREATE TABLE ALTER (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'ALTER'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword FLOAT is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_FLOAT(){
  try {
    parse("CREATE TABLE FLOAT (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'FLOAT'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword LIKE is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_LIKE(){
  try {
    parse("CREATE TABLE LIKE (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'LIKE'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword FALSE is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_FALSE(){
  try {
    parse("CREATE TABLE FALSE (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'FALSE'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword LOCAL is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_LOCAL(){
  try {
    parse("CREATE TABLE LOCAL (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'LOCAL'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword EXISTS is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_EXISTS(){
  try {
    parse("CREATE TABLE EXISTS (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'EXISTS'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword INNER is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_INNER(){
  try {
    parse("CREATE TABLE INNER (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'INNER'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword CURRENT_DATE is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_CURRENT_DATE(){
  try {
    parse("CREATE TABLE CURRENT_DATE (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'CURRENT_DATE'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword DECIMAL is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_DECIMAL(){
  try {
    parse("CREATE TABLE DECIMAL (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'DECIMAL'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword FETCH is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_FETCH(){
  try {
    parse("CREATE TABLE FETCH (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'FETCH'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword DESCRIBE is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_DESCRIBE(){
  try {
    parse("CREATE TABLE DESCRIBE (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'DESCRIBE'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword IS is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_IS(){
  try {
    parse("CREATE TABLE IS (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'IS'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword ROWS is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_ROWS(){
  try {
    parse("CREATE TABLE ROWS (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'ROWS'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword GROUP is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_GROUP(){
  try {
    parse("CREATE TABLE GROUP (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'GROUP'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword CUBE is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_CUBE(){
  try {
    parse("CREATE TABLE CUBE (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'CUBE'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword TABLE is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_TABLE(){
  try {
    parse("CREATE TABLE TABLE (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'TABLE'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword AS is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_AS(){
  try {
    parse("CREATE TABLE AS (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'AS'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword USING is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_USING(){
  try {
    parse("CREATE TABLE USING (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'USING'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword TRUNCATE is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_TRUNCATE(){
  try {
    parse("CREATE TABLE TRUNCATE (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'TRUNCATE'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword ORDER is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_ORDER(){
  try {
    parse("CREATE TABLE ORDER (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'ORDER'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword REVOKE is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_REVOKE(){
  try {
    parse("CREATE TABLE REVOKE (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'REVOKE'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword BETWEEN is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_BETWEEN(){
  try {
    parse("CREATE TABLE BETWEEN (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'BETWEEN'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword PARTITION is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_PARTITION(){
  try {
    parse("CREATE TABLE PARTITION (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'PARTITION'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword LEFT is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_LEFT(){
  try {
    parse("CREATE TABLE LEFT (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'LEFT'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword IMPORT is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_IMPORT(){
  try {
    parse("CREATE TABLE IMPORT (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'IMPORT'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword TO is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_TO(){
  try {
    parse("CREATE TABLE TO (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'TO'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword SET is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_SET(){
  try {
    parse("CREATE TABLE SET (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'SET'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword CURRENT_TIMESTAMP is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_CURRENT_TIMESTAMP(){
  try {
    parse("CREATE TABLE CURRENT_TIMESTAMP (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'CURRENT_TIMESTAMP'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword ROW is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_ROW(){
  try {
    parse("CREATE TABLE ROW (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'ROW'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword DROP is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_DROP(){
  try {
    parse("CREATE TABLE DROP (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'DROP'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword OUTER is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_OUTER(){
  try {
    parse("CREATE TABLE OUTER (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'OUTER'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword NULL is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_NULL(){
  try {
    parse("CREATE TABLE NULL (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'NULL'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword UNION is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_UNION(){
  try {
    parse("CREATE TABLE UNION (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'UNION'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword TRUE is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_TRUE(){
  try {
    parse("CREATE TABLE TRUE (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'TRUE'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword BINARY is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_BINARY(){
  try {
    parse("CREATE TABLE BINARY (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'BINARY'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword BOOLEAN is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_BOOLEAN(){
  try {
    parse("CREATE TABLE BOOLEAN (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'BOOLEAN'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword BIGINT is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_BIGINT(){
  try {
    parse("CREATE TABLE BIGINT (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'BIGINT'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword DELETE is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_DELETE(){
  try {
    parse("CREATE TABLE DELETE (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'DELETE'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword AUTHORIZATION is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_AUTHORIZATION(){
  try {
    parse("CREATE TABLE AUTHORIZATION (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'AUTHORIZATION'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword USER is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_USER(){
  try {
    parse("CREATE TABLE USER (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'USER'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword NONE is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_NONE(){
  try {
    parse("CREATE TABLE NONE (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'NONE'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword CURSOR is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_CURSOR(){
  try {
    parse("CREATE TABLE CURSOR (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'CURSOR'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword GRANT is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_GRANT(){
  try {
    parse("CREATE TABLE GRANT (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'GRANT'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword INSERT is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_INSERT(){
  try {
    parse("CREATE TABLE INSERT (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'INSERT'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword VALUES is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_VALUES(){
  try {
    parse("CREATE TABLE VALUES (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'VALUES'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword ARRAY is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_ARRAY(){
  try {
    parse("CREATE TABLE ARRAY (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'ARRAY'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword BOTH is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_BOTH(){
  try {
    parse("CREATE TABLE BOTH (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'BOTH'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the reserved keyword RLIKE is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_RLIKE(){
  try {
    parse("CREATE TABLE RLIKE (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'RLIKE'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword UPDATE is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_UPDATE(){
  try {
    parse("CREATE TABLE UPDATE (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'UPDATE'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword ALL is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_ALL(){
  try {
    parse("CREATE TABLE ALL (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'ALL'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword ROLLUP is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_ROLLUP(){
  try {
    parse("CREATE TABLE ROLLUP (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'ROLLUP'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword TRIGGER is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_TRIGGER(){
  try {
    parse("CREATE TABLE TRIGGER (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'TRIGGER'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword OF is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_OF(){
  try {
    parse("CREATE TABLE OF (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'OF'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword INTO is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_INTO(){
  try {
    parse("CREATE TABLE INTO (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'INTO'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword OUT is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_OUT(){
  try {
    parse("CREATE TABLE OUT (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'OUT'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword DOUBLE is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_DOUBLE(){
  try {
    parse("CREATE TABLE DOUBLE (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'DOUBLE'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword WITH is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_WITH(){
  try {
    parse("CREATE TABLE WITH (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'WITH'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword GROUPING is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_GROUPING(){
  try {
    parse("CREATE TABLE GROUPING (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'GROUPING'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword DATE is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_DATE(){
  try {
    parse("CREATE TABLE DATE (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'DATE'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword IN is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_IN(){
  try {
    parse("CREATE TABLE IN (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'IN'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword TIMESTAMP is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_TIMESTAMP(){
  try {
    parse("CREATE TABLE TIMESTAMP (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'TIMESTAMP'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
BooleanVerifier EqualityVerifier HybridVerifier
// Verifies the SQL:2011 reserved keyword INT is rejected as a table name with the expected error.
@Test public void testSQL11ReservedKeyWords_INT(){
  try {
    parse("CREATE TABLE INT (col STRING)");
    Assert.fail("Expected ParseException"); // idiomatic forced failure instead of assertFalse(msg, true)
  } catch (ParseException ex) {
    Assert.assertEquals("Failure didn't match.","line 1:13 Failed to recognize predicate 'INT'. Failed rule: 'identifier' in table name",ex.getMessage());
  }
}
Class: org.apache.hadoop.hive.ql.parse.TestSQL11ReservedKeyWordsPositive EqualityVerifier
// Confirms the parser accepts WITH as a table name and yields the expected AST.
@Test public void testSQL11ReservedKeyWords_WITH() throws ParseException {
  final String expectedAst = "(tok_createtable (tok_tabname with) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))";
  ASTNode ast = parse("CREATE TABLE WITH (col STRING)");
  Assert.assertEquals("AST doesn't match", expectedAst, ast.toStringTree());
}
EqualityVerifier
// Confirms the parser accepts FULL as a table name and yields the expected AST.
@Test public void testSQL11ReservedKeyWords_FULL() throws ParseException {
  final String expectedAst = "(tok_createtable (tok_tabname full) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))";
  ASTNode ast = parse("CREATE TABLE FULL (col STRING)");
  Assert.assertEquals("AST doesn't match", expectedAst, ast.toStringTree());
}
EqualityVerifier
// Confirms the parser accepts TRUNCATE as a table name and yields the expected AST.
@Test public void testSQL11ReservedKeyWords_TRUNCATE() throws ParseException {
  final String expectedAst = "(tok_createtable (tok_tabname truncate) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))";
  ASTNode ast = parse("CREATE TABLE TRUNCATE (col STRING)");
  Assert.assertEquals("AST doesn't match", expectedAst, ast.toStringTree());
}
EqualityVerifier
// Confirms the parser accepts PERCENT as a table name and yields the expected AST.
@Test public void testSQL11ReservedKeyWords_PERCENT() throws ParseException {
  final String expectedAst = "(tok_createtable (tok_tabname percent) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))";
  ASTNode ast = parse("CREATE TABLE PERCENT (col STRING)");
  Assert.assertEquals("AST doesn't match", expectedAst, ast.toStringTree());
}
EqualityVerifier
// Confirms the parser accepts ROWS as a table name and yields the expected AST.
@Test public void testSQL11ReservedKeyWords_ROWS() throws ParseException {
  final String expectedAst = "(tok_createtable (tok_tabname rows) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))";
  ASTNode ast = parse("CREATE TABLE ROWS (col STRING)");
  Assert.assertEquals("AST doesn't match", expectedAst, ast.toStringTree());
}
EqualityVerifier
// Confirms the parser accepts LIKE as a table name and yields the expected AST.
@Test public void testSQL11ReservedKeyWords_LIKE() throws ParseException {
  final String expectedAst = "(tok_createtable (tok_tabname like) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))";
  ASTNode ast = parse("CREATE TABLE LIKE (col STRING)");
  Assert.assertEquals("AST doesn't match", expectedAst, ast.toStringTree());
}
EqualityVerifier
// Confirms the parser accepts UNION as a table name and yields the expected AST.
@Test public void testSQL11ReservedKeyWords_UNION() throws ParseException {
  final String expectedAst = "(tok_createtable (tok_tabname union) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))";
  ASTNode ast = parse("CREATE TABLE UNION (col STRING)");
  Assert.assertEquals("AST doesn't match", expectedAst, ast.toStringTree());
}
EqualityVerifier
// Confirms the parser accepts INT as a table name and yields the expected AST.
@Test public void testSQL11ReservedKeyWords_INT() throws ParseException {
  final String expectedAst = "(tok_createtable (tok_tabname int) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))";
  ASTNode ast = parse("CREATE TABLE INT (col STRING)");
  Assert.assertEquals("AST doesn't match", expectedAst, ast.toStringTree());
}
EqualityVerifier
// Confirms the parser accepts CURSOR as a table name and yields the expected AST.
@Test public void testSQL11ReservedKeyWords_CURSOR() throws ParseException {
  final String expectedAst = "(tok_createtable (tok_tabname cursor) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))";
  ASTNode ast = parse("CREATE TABLE CURSOR (col STRING)");
  Assert.assertEquals("AST doesn't match", expectedAst, ast.toStringTree());
}
EqualityVerifier
// Confirms the parser accepts ARRAY as a table name and yields the expected AST.
@Test public void testSQL11ReservedKeyWords_ARRAY() throws ParseException {
  final String expectedAst = "(tok_createtable (tok_tabname array) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))";
  ASTNode ast = parse("CREATE TABLE ARRAY (col STRING)");
  Assert.assertEquals("AST doesn't match", expectedAst, ast.toStringTree());
}
EqualityVerifier
// Confirms the parser accepts DECIMAL as a table name and yields the expected AST.
@Test public void testSQL11ReservedKeyWords_DECIMAL() throws ParseException {
  final String expectedAst = "(tok_createtable (tok_tabname decimal) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))";
  ASTNode ast = parse("CREATE TABLE DECIMAL (col STRING)");
  Assert.assertEquals("AST doesn't match", expectedAst, ast.toStringTree());
}
EqualityVerifier
// Confirms the parser accepts CURRENT_DATE as a table name and yields the expected AST.
@Test public void testSQL11ReservedKeyWords_CURRENT_DATE() throws ParseException {
  final String expectedAst = "(tok_createtable (tok_tabname current_date) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))";
  ASTNode ast = parse("CREATE TABLE CURRENT_DATE (col STRING)");
  Assert.assertEquals("AST doesn't match", expectedAst, ast.toStringTree());
}
EqualityVerifier
// Confirms the parser accepts USING as a table name and yields the expected AST.
@Test public void testSQL11ReservedKeyWords_USING() throws ParseException {
  final String expectedAst = "(tok_createtable (tok_tabname using) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))";
  ASTNode ast = parse("CREATE TABLE USING (col STRING)");
  Assert.assertEquals("AST doesn't match", expectedAst, ast.toStringTree());
}
EqualityVerifier
// Confirms the parser accepts AS as a table name and yields the expected AST.
@Test public void testSQL11ReservedKeyWords_AS() throws ParseException {
  final String expectedAst = "(tok_createtable (tok_tabname as) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))";
  ASTNode ast = parse("CREATE TABLE AS (col STRING)");
  Assert.assertEquals("AST doesn't match", expectedAst, ast.toStringTree());
}
EqualityVerifier
// Confirms the parser accepts BIGINT as a table name and yields the expected AST.
@Test public void testSQL11ReservedKeyWords_BIGINT() throws ParseException {
  final String expectedAst = "(tok_createtable (tok_tabname bigint) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))";
  ASTNode ast = parse("CREATE TABLE BIGINT (col STRING)");
  Assert.assertEquals("AST doesn't match", expectedAst, ast.toStringTree());
}
EqualityVerifier
// Confirms the parser accepts VALUES as a table name and yields the expected AST.
@Test public void testSQL11ReservedKeyWords_VALUES() throws ParseException {
  final String expectedAst = "(tok_createtable (tok_tabname values) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))";
  ASTNode ast = parse("CREATE TABLE VALUES (col STRING)");
  Assert.assertEquals("AST doesn't match", expectedAst, ast.toStringTree());
}
EqualityVerifier
// Confirms the parser accepts ORDER as a table name and yields the expected AST.
@Test public void testSQL11ReservedKeyWords_ORDER() throws ParseException {
  final String expectedAst = "(tok_createtable (tok_tabname order) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))";
  ASTNode ast = parse("CREATE TABLE ORDER (col STRING)");
  Assert.assertEquals("AST doesn't match", expectedAst, ast.toStringTree());
}
EqualityVerifier
// Confirms the parser accepts GRANT as a table name and yields the expected AST.
@Test public void testSQL11ReservedKeyWords_GRANT() throws ParseException {
  final String expectedAst = "(tok_createtable (tok_tabname grant) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))";
  ASTNode ast = parse("CREATE TABLE GRANT (col STRING)");
  Assert.assertEquals("AST doesn't match", expectedAst, ast.toStringTree());
}
EqualityVerifier
// Confirms the parser accepts REGEXP as a table name and yields the expected AST.
@Test public void testSQL11ReservedKeyWords_REGEXP() throws ParseException {
  final String expectedAst = "(tok_createtable (tok_tabname regexp) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))";
  ASTNode ast = parse("CREATE TABLE REGEXP (col STRING)");
  Assert.assertEquals("AST doesn't match", expectedAst, ast.toStringTree());
}
EqualityVerifier
// Confirms the parser accepts NONE as a table name and yields the expected AST.
@Test public void testSQL11ReservedKeyWords_NONE() throws ParseException {
  final String expectedAst = "(tok_createtable (tok_tabname none) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))";
  ASTNode ast = parse("CREATE TABLE NONE (col STRING)");
  Assert.assertEquals("AST doesn't match", expectedAst, ast.toStringTree());
}
EqualityVerifier
// Confirms the parser accepts BOOLEAN as a table name and yields the expected AST.
@Test public void testSQL11ReservedKeyWords_BOOLEAN() throws ParseException {
  final String expectedAst = "(tok_createtable (tok_tabname boolean) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))";
  ASTNode ast = parse("CREATE TABLE BOOLEAN (col STRING)");
  Assert.assertEquals("AST doesn't match", expectedAst, ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_CURRENT_TIMESTAMP() throws ParseException {
ASTNode ast=parse("CREATE TABLE CURRENT_TIMESTAMP (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname current_timestamp) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_REVOKE() throws ParseException {
ASTNode ast=parse("CREATE TABLE REVOKE (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname revoke) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_DATE() throws ParseException {
ASTNode ast=parse("CREATE TABLE DATE (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname date) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_GROUPING() throws ParseException {
ASTNode ast=parse("CREATE TABLE GROUPING (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname grouping) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_TO() throws ParseException {
ASTNode ast=parse("CREATE TABLE TO (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname to) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_BOTH() throws ParseException {
ASTNode ast=parse("CREATE TABLE BOTH (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname both) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_LATERAL() throws ParseException {
ASTNode ast=parse("CREATE TABLE LATERAL (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname lateral) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_TRIGGER() throws ParseException {
ASTNode ast=parse("CREATE TABLE TRIGGER (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname trigger) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_EXISTS() throws ParseException {
ASTNode ast=parse("CREATE TABLE EXISTS (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname exists) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_SMALLINT() throws ParseException {
ASTNode ast=parse("CREATE TABLE SMALLINT (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname smallint) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_ALTER() throws ParseException {
ASTNode ast=parse("CREATE TABLE ALTER (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname alter) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_OUT() throws ParseException {
ASTNode ast=parse("CREATE TABLE OUT (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname out) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_FALSE() throws ParseException {
ASTNode ast=parse("CREATE TABLE FALSE (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname false) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_ALL() throws ParseException {
ASTNode ast=parse("CREATE TABLE ALL (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname all) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_GROUP() throws ParseException {
ASTNode ast=parse("CREATE TABLE GROUP (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname group) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_BY() throws ParseException {
ASTNode ast=parse("CREATE TABLE BY (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname by) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_ROLLUP() throws ParseException {
ASTNode ast=parse("CREATE TABLE ROLLUP (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname rollup) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_PROCEDURE() throws ParseException {
ASTNode ast=parse("CREATE TABLE PROCEDURE (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname procedure) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_TIMESTAMP() throws ParseException {
ASTNode ast=parse("CREATE TABLE TIMESTAMP (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname timestamp) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_USER() throws ParseException {
ASTNode ast=parse("CREATE TABLE USER (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname user) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_INTO() throws ParseException {
ASTNode ast=parse("CREATE TABLE INTO (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname into) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_RANGE() throws ParseException {
ASTNode ast=parse("CREATE TABLE RANGE (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname range) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_EXTERNAL() throws ParseException {
ASTNode ast=parse("CREATE TABLE EXTERNAL (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname external) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_DOUBLE() throws ParseException {
ASTNode ast=parse("CREATE TABLE DOUBLE (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname double) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_OF() throws ParseException {
ASTNode ast=parse("CREATE TABLE OF (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname of) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_DESCRIBE() throws ParseException {
ASTNode ast=parse("CREATE TABLE DESCRIBE (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname describe) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_READS() throws ParseException {
ASTNode ast=parse("CREATE TABLE READS (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname reads) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_ROW() throws ParseException {
ASTNode ast=parse("CREATE TABLE ROW (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname row) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_INNER() throws ParseException {
ASTNode ast=parse("CREATE TABLE INNER (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname inner) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_RLIKE() throws ParseException {
ASTNode ast=parse("CREATE TABLE RLIKE (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname rlike) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_RIGHT() throws ParseException {
ASTNode ast=parse("CREATE TABLE RIGHT (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname right) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_UPDATE() throws ParseException {
ASTNode ast=parse("CREATE TABLE UPDATE (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname update) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_BINARY() throws ParseException {
ASTNode ast=parse("CREATE TABLE BINARY (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname binary) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_INSERT() throws ParseException {
ASTNode ast=parse("CREATE TABLE INSERT (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname insert) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_LOCAL() throws ParseException {
ASTNode ast=parse("CREATE TABLE LOCAL (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname local) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_SET() throws ParseException {
ASTNode ast=parse("CREATE TABLE SET (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname set) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_NULL() throws ParseException {
ASTNode ast=parse("CREATE TABLE NULL (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname null) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_TABLE() throws ParseException {
ASTNode ast=parse("CREATE TABLE TABLE (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname table) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_IN() throws ParseException {
ASTNode ast=parse("CREATE TABLE IN (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname in) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_DELETE() throws ParseException {
ASTNode ast=parse("CREATE TABLE DELETE (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname delete) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_CREATE() throws ParseException {
ASTNode ast=parse("CREATE TABLE CREATE (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname create) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_DROP() throws ParseException {
ASTNode ast=parse("CREATE TABLE DROP (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname drop) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_IS() throws ParseException {
ASTNode ast=parse("CREATE TABLE IS (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname is) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_TRUE() throws ParseException {
ASTNode ast=parse("CREATE TABLE TRUE (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname true) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_FETCH() throws ParseException {
ASTNode ast=parse("CREATE TABLE FETCH (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname fetch) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_FOR() throws ParseException {
ASTNode ast=parse("CREATE TABLE FOR (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname for) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_FLOAT() throws ParseException {
ASTNode ast=parse("CREATE TABLE FLOAT (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname float) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_IMPORT() throws ParseException {
ASTNode ast=parse("CREATE TABLE IMPORT (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname import) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_INTERSECT() throws ParseException {
ASTNode ast=parse("CREATE TABLE INTERSECT (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname intersect) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_AUTHORIZATION() throws ParseException {
ASTNode ast=parse("CREATE TABLE AUTHORIZATION (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname authorization) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_BETWEEN() throws ParseException {
ASTNode ast=parse("CREATE TABLE BETWEEN (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname between) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_LEFT() throws ParseException {
ASTNode ast=parse("CREATE TABLE LEFT (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname left) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_CUBE() throws ParseException {
ASTNode ast=parse("CREATE TABLE CUBE (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname cube) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_PARTITION() throws ParseException {
ASTNode ast=parse("CREATE TABLE PARTITION (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname partition) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
EqualityVerifier
@Test public void testSQL11ReservedKeyWords_OUTER() throws ParseException {
ASTNode ast=parse("CREATE TABLE OUTER (col STRING)");
Assert.assertEquals("AST doesn't match","(tok_createtable (tok_tabname outer) tok_liketable (tok_tabcollist (tok_tabcol col tok_string)))",ast.toStringTree());
}
Class: org.apache.hadoop.hive.ql.parse.TestSplitSample EqualityVerifier
@Test public void testEstimateSourceSizeRowCount(){
// A row-count based sample cannot shrink the estimate: the input size is returned unchanged.
splitSample=new SplitSample(ROW_COUNT);
final long inputSize=123;
assertEquals(inputSize,splitSample.estimateSourceSize(inputSize));
}
EqualityVerifier
@Test public void testGetTargetSizePercent(){
// Percent-based sampling of a 1000-byte source should target 20 bytes
// (PERCENT is defined elsewhere in this class — presumably 2; confirm there).
splitSample=new SplitSample(PERCENT,SEED_NUM);
final long totalSize=1000;
assertEquals(20,splitSample.getTargetSize(totalSize));
}
EqualityVerifier
@Test public void testEstimateSourceSizeTotalLength(){
// Total-length sampling leaves the source-size estimate untouched.
splitSample=new SplitSample(TOTAL_LENGTH,SEED_NUM);
final long inputSize=10;
assertEquals(inputSize,splitSample.estimateSourceSize(inputSize));
}
InternalCallVerifier EqualityVerifier
@Test public void testGetTargetSizeTotalLength(){
splitSample=new SplitSample(TOTAL_LENGTH,SEED_NUM);
// The configured total length is the target regardless of the input size.
for (long totalSize : new long[]{1000L,100L}) {
assertEquals(TOTAL_LENGTH,splitSample.getTargetSize(totalSize));
}
}
EqualityVerifier
@Test public void testEstimateSourceSizePercent(){
// A percent-based sample scales the estimate back up: 10 bytes sampled => 500 bytes
// of estimated source (consistent with PERCENT = 2; confirm against the field).
splitSample=new SplitSample(PERCENT,SEED_NUM);
final long sampledSize=10;
assertEquals(500,splitSample.estimateSourceSize(sampledSize));
}
Class: org.apache.hadoop.hive.ql.parse.TestUnpermittedCharsInColumnNameCreateTableNegative BooleanVerifier EqualityVerifier HybridVerifier
@Test public void testColonInCreateTable(){
try {
// ':' is not permitted in a column name in CREATE TABLE; parsing must fail.
parse("CREATE TABLE testTable (`emp:no` STRING)");
// Assert.fail replaces the convoluted assertFalse("...", true) always-fail idiom,
// matching the style used elsewhere in this file (e.g. testGrantServer).
Assert.fail("Expected ParseException");
}
catch ( ParseException ex) {
Assert.assertEquals("Failure didn't match.","line 1:39 Failed to recognize predicate ')'. Failed rule: '[., :] can not be used in column name in create table statement.' in column specification",ex.getMessage());
}
}
BooleanVerifier EqualityVerifier HybridVerifier
@Test public void testDotInCreateTable(){
try {
// '.' is not permitted in a column name in CREATE TABLE; parsing must fail.
parse("CREATE TABLE testTable (`emp.no` STRING)");
// Assert.fail replaces the convoluted assertFalse("...", true) always-fail idiom,
// matching the style used elsewhere in this file (e.g. testGrantUri).
Assert.fail("Expected ParseException");
}
catch ( ParseException ex) {
Assert.assertEquals("Failure didn't match.","line 1:39 Failed to recognize predicate ')'. Failed rule: '[., :] can not be used in column name in create table statement.' in column specification",ex.getMessage());
}
}
Class: org.apache.hadoop.hive.ql.parse.authorization.TestHiveAuthorizationTaskFactory BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * GRANT ROLE ... TO ROLE ...
 */
@Test public void testGrantRoleRole() throws Exception {
String query="GRANT ROLE " + ROLE + " TO ROLE "+ ROLE;
GrantRevokeRoleDDL desc=analyze(query).getGrantRevokeRoleDDL();
Assert.assertNotNull("Grant should not be null",desc);
Assert.assertTrue("Expected grant ",desc.getGrant());
Assert.assertFalse("With admin option is not specified",desc.isGrantOption());
// Grantor is the current user, of USER principal type.
Assert.assertEquals(currentUser,desc.getGrantor());
Assert.assertEquals(PrincipalType.USER,desc.getGrantorType());
// Exactly one granted role, and it is ROLE.
for ( String grantedRole : ListSizeMatcher.inList(desc.getRoles()).ofSize(1)) {
Assert.assertEquals(ROLE,grantedRole);
}
// Exactly one grantee principal, of ROLE type.
for ( PrincipalDesc grantee : ListSizeMatcher.inList(desc.getPrincipalDesc()).ofSize(1)) {
Assert.assertEquals(PrincipalType.ROLE,grantee.getType());
Assert.assertEquals(ROLE,grantee.getName());
}
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * REVOKE ROLE ... FROM GROUP ...
 */
@Test public void testRevokeRoleGroup() throws Exception {
String query="REVOKE ROLE " + ROLE + " FROM GROUP "+ GROUP;
GrantRevokeRoleDDL desc=analyze(query).getGrantRevokeRoleDDL();
Assert.assertNotNull("Grant should not be null",desc);
// REVOKE produces getGrant() == false on the same DDL descriptor.
Assert.assertFalse("Did not expect grant ",desc.getGrant());
Assert.assertFalse("With admin option is not specified",desc.isGrantOption());
Assert.assertEquals(currentUser,desc.getGrantor());
Assert.assertEquals(PrincipalType.USER,desc.getGrantorType());
// Exactly one revoked role, and it is ROLE.
for ( String revokedRole : ListSizeMatcher.inList(desc.getRoles()).ofSize(1)) {
Assert.assertEquals(ROLE,revokedRole);
}
// Exactly one principal, of GROUP type.
for ( PrincipalDesc revokee : ListSizeMatcher.inList(desc.getPrincipalDesc()).ofSize(1)) {
Assert.assertEquals(PrincipalType.GROUP,revokee.getType());
Assert.assertEquals(GROUP,revokee.getName());
}
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * CREATE ROLE ...
 */
@Test public void testCreateRole() throws Exception {
RoleDDLDesc desc=analyze("CREATE ROLE " + ROLE).getRoleDDLDesc();
Assert.assertNotNull("Role should not be null",desc);
// The descriptor carries the CREATE_ROLE operation, a role (not group) name.
Assert.assertEquals(RoleOperation.CREATE_ROLE,desc.getOperation());
Assert.assertFalse("Did not expect a group",desc.getGroup());
Assert.assertEquals(ROLE,desc.getName());
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * SHOW GRANT USER ... ON TABLE ...
 */
@Test public void testShowGrantUserOnTable() throws Exception {
DDLWork work=analyze("SHOW GRANT USER " + USER + " ON TABLE "+ TABLE);
ShowGrantDesc grantDesc=work.getShowGrantDesc();
Assert.assertNotNull("Show grant should not be null",grantDesc);
// Principal must be the named USER, of USER principal type.
Assert.assertEquals(PrincipalType.USER,grantDesc.getPrincipalDesc().getType());
Assert.assertEquals(USER,grantDesc.getPrincipalDesc().getName());
// Target object must be the qualified table. (A verbatim duplicate of the
// getTable() assertion was removed.)
Assert.assertTrue("Expected table",grantDesc.getHiveObj().getTable());
Assert.assertEquals(TABLE_QNAME,grantDesc.getHiveObj().getObject());
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * SHOW ROLE GRANT ROLE ...
 */
@Test public void testShowRoleGrantRole() throws Exception {
RoleDDLDesc desc=analyze("SHOW ROLE GRANT ROLE " + ROLE).getRoleDDLDesc();
Assert.assertNotNull("Role should not be null",desc);
// Descriptor captures operation, principal type, and the role name queried.
Assert.assertEquals(RoleOperation.SHOW_ROLE_GRANT,desc.getOperation());
Assert.assertEquals(PrincipalType.ROLE,desc.getPrincipalType());
Assert.assertEquals(ROLE,desc.getName());
}
UtilityVerifier EqualityVerifier HybridVerifier
/**
 * GRANT ALL ON SERVER
 */
@Test public void testGrantServer() throws Exception {
final String serverName="foo";
try {
analyze("GRANT ALL ON SERVER " + serverName + " TO USER user2");
Assert.fail("Grant on Server should fail");
}
catch ( SemanticException e) {
// The dummy factory records the server name it saw before rejecting the grant.
Assert.assertEquals(serverName,DummyHiveAuthorizationTaskFactoryImpl.serverName);
}
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * GRANT ROLE ... TO USER ...
 */
@Test public void testGrantRoleUser() throws Exception {
String query="GRANT ROLE " + ROLE + " TO USER "+ USER;
GrantRevokeRoleDDL desc=analyze(query).getGrantRevokeRoleDDL();
Assert.assertNotNull("Grant should not be null",desc);
Assert.assertTrue("Expected grant ",desc.getGrant());
Assert.assertFalse("With admin option is not specified",desc.isGrantOption());
// Grantor is the current user, of USER principal type.
Assert.assertEquals(currentUser,desc.getGrantor());
Assert.assertEquals(PrincipalType.USER,desc.getGrantorType());
// Exactly one granted role, and it is ROLE.
for ( String grantedRole : ListSizeMatcher.inList(desc.getRoles()).ofSize(1)) {
Assert.assertEquals(ROLE,grantedRole);
}
// Exactly one grantee principal, of USER type.
for ( PrincipalDesc grantee : ListSizeMatcher.inList(desc.getPrincipalDesc()).ofSize(1)) {
Assert.assertEquals(PrincipalType.USER,grantee.getType());
Assert.assertEquals(USER,grantee.getName());
}
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * REVOKE ... ON TABLE ... FROM ROLE ...
 */
@Test public void testRevokeRoleTable() throws Exception {
String query="REVOKE " + SELECT + " ON TABLE "+ TABLE+ " FROM ROLE "+ ROLE;
RevokeDesc desc=analyze(query).getRevokeDesc();
Assert.assertNotNull("Revoke should not be null",desc);
// Exactly one principal, of ROLE type.
for ( PrincipalDesc revokee : ListSizeMatcher.inList(desc.getPrincipals()).ofSize(1)) {
Assert.assertEquals(PrincipalType.ROLE,revokee.getType());
Assert.assertEquals(ROLE,revokee.getName());
}
// Exactly one revoked privilege: SELECT.
for ( PrivilegeDesc priv : ListSizeMatcher.inList(desc.getPrivileges()).ofSize(1)) {
Assert.assertEquals(Privilege.SELECT,priv.getPrivilege());
}
// Subject must be the qualified table.
Assert.assertTrue("Expected table",desc.getPrivilegeSubjectDesc().getTable());
Assert.assertEquals(TABLE_QNAME,desc.getPrivilegeSubjectDesc().getObject());
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * SHOW GRANT ROLE ... ON TABLE ...
 */
@Test public void testShowGrantRoleOnTable() throws Exception {
DDLWork work=analyze("SHOW GRANT ROLE " + ROLE + " ON TABLE "+ TABLE);
ShowGrantDesc grantDesc=work.getShowGrantDesc();
Assert.assertNotNull("Show grant should not be null",grantDesc);
// Principal must be the named ROLE, of ROLE principal type.
Assert.assertEquals(PrincipalType.ROLE,grantDesc.getPrincipalDesc().getType());
Assert.assertEquals(ROLE,grantDesc.getPrincipalDesc().getName());
// Target object must be the qualified table. (A verbatim duplicate of the
// getTable() assertion was removed.)
Assert.assertTrue("Expected table",grantDesc.getHiveObj().getTable());
Assert.assertEquals(TABLE_QNAME,grantDesc.getHiveObj().getObject());
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * GRANT ... ON TABLE ... TO GROUP ...
 */
@Test public void testGrantGroupTable() throws Exception {
String query="GRANT " + SELECT + " ON TABLE "+ TABLE+ " TO GROUP "+ GROUP;
GrantDesc desc=analyze(query).getGrantDesc();
Assert.assertNotNull("Grant should not be null",desc);
// Exactly one grantee principal, of GROUP type.
for ( PrincipalDesc grantee : ListSizeMatcher.inList(desc.getPrincipals()).ofSize(1)) {
Assert.assertEquals(PrincipalType.GROUP,grantee.getType());
Assert.assertEquals(GROUP,grantee.getName());
}
// Exactly one granted privilege: SELECT.
for ( PrivilegeDesc priv : ListSizeMatcher.inList(desc.getPrivileges()).ofSize(1)) {
Assert.assertEquals(Privilege.SELECT,priv.getPrivilege());
}
// Subject must be the qualified table.
Assert.assertTrue("Expected table",desc.getPrivilegeSubjectDesc().getTable());
Assert.assertEquals(TABLE_QNAME,desc.getPrivilegeSubjectDesc().getObject());
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * GRANT ROLE ... TO GROUP ...
 */
@Test public void testGrantRoleGroup() throws Exception {
String query="GRANT ROLE " + ROLE + " TO GROUP "+ GROUP;
GrantRevokeRoleDDL desc=analyze(query).getGrantRevokeRoleDDL();
Assert.assertNotNull("Grant should not be null",desc);
Assert.assertTrue("Expected grant ",desc.getGrant());
Assert.assertFalse("With admin option is not specified",desc.isGrantOption());
// Grantor is the current user, of USER principal type.
Assert.assertEquals(currentUser,desc.getGrantor());
Assert.assertEquals(PrincipalType.USER,desc.getGrantorType());
// Exactly one granted role, and it is ROLE.
for ( String grantedRole : ListSizeMatcher.inList(desc.getRoles()).ofSize(1)) {
Assert.assertEquals(ROLE,grantedRole);
}
// Exactly one grantee principal, of GROUP type.
for ( PrincipalDesc grantee : ListSizeMatcher.inList(desc.getPrincipalDesc()).ofSize(1)) {
Assert.assertEquals(PrincipalType.GROUP,grantee.getType());
Assert.assertEquals(GROUP,grantee.getName());
}
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * REVOKE ... ON TABLE ... FROM GROUP ...
 */
@Test public void testRevokeGroupTable() throws Exception {
String query="REVOKE " + SELECT + " ON TABLE "+ TABLE+ " FROM GROUP "+ GROUP;
RevokeDesc desc=analyze(query).getRevokeDesc();
Assert.assertNotNull("Revoke should not be null",desc);
// Exactly one principal, of GROUP type.
for ( PrincipalDesc revokee : ListSizeMatcher.inList(desc.getPrincipals()).ofSize(1)) {
Assert.assertEquals(PrincipalType.GROUP,revokee.getType());
Assert.assertEquals(GROUP,revokee.getName());
}
// Exactly one revoked privilege: SELECT.
for ( PrivilegeDesc priv : ListSizeMatcher.inList(desc.getPrivileges()).ofSize(1)) {
Assert.assertEquals(Privilege.SELECT,priv.getPrivilege());
}
// Subject must be the qualified table.
Assert.assertTrue("Expected table",desc.getPrivilegeSubjectDesc().getTable());
Assert.assertEquals(TABLE_QNAME,desc.getPrivilegeSubjectDesc().getObject());
}
UtilityVerifier EqualityVerifier HybridVerifier
/**
 * GRANT ALL ON URI
 */
@Test public void testGrantUri() throws Exception {
final String uriPath="/tmp";
try {
analyze("GRANT ALL ON URI '" + uriPath + "' TO USER user2");
Assert.fail("Grant on URI should fail");
}
catch ( SemanticException e) {
// The dummy factory records the URI path it saw before rejecting the grant.
Assert.assertEquals(uriPath,DummyHiveAuthorizationTaskFactoryImpl.uriPath);
}
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * SHOW GRANT GROUP ... ON TABLE ...
 */
@Test public void testShowGrantGroupOnTable() throws Exception {
DDLWork work=analyze("SHOW GRANT GROUP " + GROUP + " ON TABLE "+ TABLE);
ShowGrantDesc grantDesc=work.getShowGrantDesc();
Assert.assertNotNull("Show grant should not be null",grantDesc);
// Principal must be the named GROUP, of GROUP principal type.
Assert.assertEquals(PrincipalType.GROUP,grantDesc.getPrincipalDesc().getType());
Assert.assertEquals(GROUP,grantDesc.getPrincipalDesc().getName());
// Target object must be the qualified table. (A verbatim duplicate of the
// getTable() assertion was removed.)
Assert.assertTrue("Expected table",grantDesc.getHiveObj().getTable());
Assert.assertEquals(TABLE_QNAME,grantDesc.getHiveObj().getObject());
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * REVOKE ROLE ... FROM ROLE ...
 */
@Test public void testRevokeRoleRole() throws Exception {
String query="REVOKE ROLE " + ROLE + " FROM ROLE "+ ROLE;
GrantRevokeRoleDDL desc=analyze(query).getGrantRevokeRoleDDL();
Assert.assertNotNull("Grant should not be null",desc);
// REVOKE produces getGrant() == false on the same DDL descriptor.
Assert.assertFalse("Did not expect grant ",desc.getGrant());
Assert.assertFalse("With admin option is not specified",desc.isGrantOption());
Assert.assertEquals(currentUser,desc.getGrantor());
Assert.assertEquals(PrincipalType.USER,desc.getGrantorType());
// Exactly one revoked role, and it is ROLE.
for ( String revokedRole : ListSizeMatcher.inList(desc.getRoles()).ofSize(1)) {
Assert.assertEquals(ROLE,revokedRole);
}
// Exactly one principal, of ROLE type.
for ( PrincipalDesc revokee : ListSizeMatcher.inList(desc.getPrincipalDesc()).ofSize(1)) {
Assert.assertEquals(PrincipalType.ROLE,revokee.getType());
Assert.assertEquals(ROLE,revokee.getName());
}
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * GRANT ... ON TABLE ... TO ROLE ...
 */
@Test public void testGrantRoleTable() throws Exception {
String query="GRANT " + SELECT + " ON TABLE "+ TABLE+ " TO ROLE "+ ROLE;
GrantDesc desc=analyze(query).getGrantDesc();
Assert.assertNotNull("Grant should not be null",desc);
// Exactly one grantee principal, of ROLE type.
for ( PrincipalDesc grantee : ListSizeMatcher.inList(desc.getPrincipals()).ofSize(1)) {
Assert.assertEquals(PrincipalType.ROLE,grantee.getType());
Assert.assertEquals(ROLE,grantee.getName());
}
// Exactly one granted privilege: SELECT.
for ( PrivilegeDesc priv : ListSizeMatcher.inList(desc.getPrivileges()).ofSize(1)) {
Assert.assertEquals(Privilege.SELECT,priv.getPrivilege());
}
// Subject must be the qualified table.
Assert.assertTrue("Expected table",desc.getPrivilegeSubjectDesc().getTable());
Assert.assertEquals(TABLE_QNAME,desc.getPrivilegeSubjectDesc().getObject());
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * REVOKE ... ON TABLE ... FROM USER ...
 */
@Test public void testRevokeUserTable() throws Exception {
String query="REVOKE " + SELECT + " ON TABLE "+ TABLE+ " FROM USER "+ USER;
RevokeDesc desc=analyze(query).getRevokeDesc();
Assert.assertNotNull("Revoke should not be null",desc);
// Exactly one principal, of USER type.
for ( PrincipalDesc revokee : ListSizeMatcher.inList(desc.getPrincipals()).ofSize(1)) {
Assert.assertEquals(PrincipalType.USER,revokee.getType());
Assert.assertEquals(USER,revokee.getName());
}
// Exactly one revoked privilege: SELECT.
for ( PrivilegeDesc priv : ListSizeMatcher.inList(desc.getPrivileges()).ofSize(1)) {
Assert.assertEquals(Privilege.SELECT,priv.getPrivilege());
}
// Subject must be the qualified table.
Assert.assertTrue("Expected table",desc.getPrivilegeSubjectDesc().getTable());
Assert.assertEquals(TABLE_QNAME,desc.getPrivilegeSubjectDesc().getObject());
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * DROP ROLE ...
 *
 * <p>The statement keyword is deliberately written in mixed case ("DROp"),
 * presumably to exercise case-insensitive keyword parsing.</p>
 */
@Test public void testDropRole() throws Exception {
  DDLWork ddlWork=analyze("DROp ROLE " + ROLE);
  RoleDDLDesc desc=ddlWork.getRoleDDLDesc();
  Assert.assertNotNull("Role should not be null",desc);
  Assert.assertEquals(RoleOperation.DROP_ROLE,desc.getOperation());
  Assert.assertFalse("Did not expect a group",desc.getGroup());
  Assert.assertEquals(ROLE,desc.getName());
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * GRANT ... ON TABLE ... TO USER ...
 *
 * <p>Expects one USER principal, one SELECT privilege, and a table-level
 * privilege subject carrying the qualified table name.</p>
 */
@Test public void testGrantUserTable() throws Exception {
DDLWork work=analyze("GRANT " + SELECT + " ON TABLE "+ TABLE+ " TO USER "+ USER);
GrantDesc grantDesc=work.getGrantDesc();
Assert.assertNotNull("Grant should not be null",grantDesc);
// ListSizeMatcher.ofSize(1) presumably also asserts a single-element list.
for ( PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) {
Assert.assertEquals(PrincipalType.USER,principal.getType());
Assert.assertEquals(USER,principal.getName());
}
for ( PrivilegeDesc privilege : ListSizeMatcher.inList(grantDesc.getPrivileges()).ofSize(1)) {
Assert.assertEquals(Privilege.SELECT,privilege.getPrivilege());
}
// The subject of the grant is the table itself, identified by its qualified name.
Assert.assertTrue("Expected table",grantDesc.getPrivilegeSubjectDesc().getTable());
Assert.assertEquals(TABLE_QNAME,grantDesc.getPrivilegeSubjectDesc().getObject());
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * REVOKE ROLE ... FROM USER ...
 *
 * <p>Expects a revoke (grant flag off) with no admin option, the current
 * session user as grantor, and exactly one role revoked from exactly one
 * USER principal.</p>
 */
@Test public void testRevokeRoleUser() throws Exception {
DDLWork work=analyze("REVOKE ROLE " + ROLE + " FROM USER "+ USER);
GrantRevokeRoleDDL grantDesc=work.getGrantRevokeRoleDDL();
Assert.assertNotNull("Grant should not be null",grantDesc);
// REVOKE statements share the grant/revoke descriptor; the grant flag must be off.
Assert.assertFalse("Did not expect grant ",grantDesc.getGrant());
Assert.assertFalse("With admin option is not specified",grantDesc.isGrantOption());
// The grantor recorded on the descriptor must be the current session user.
Assert.assertEquals(currentUser,grantDesc.getGrantor());
Assert.assertEquals(PrincipalType.USER,grantDesc.getGrantorType());
// ListSizeMatcher.ofSize(1) presumably also asserts a single-element list.
for ( String role : ListSizeMatcher.inList(grantDesc.getRoles()).ofSize(1)) {
Assert.assertEquals(ROLE,role);
}
for ( PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipalDesc()).ofSize(1)) {
Assert.assertEquals(PrincipalType.USER,principal.getType());
Assert.assertEquals(USER,principal.getName());
}
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * SHOW ROLE GRANT USER ...
 *
 * <p>The analyzed statement should yield a SHOW_ROLE_GRANT descriptor for a
 * USER principal carrying the user's name.</p>
 */
@Test public void testShowRoleGrantUser() throws Exception {
  DDLWork ddlWork=analyze("SHOW ROLE GRANT USER " + USER);
  RoleDDLDesc desc=ddlWork.getRoleDDLDesc();
  Assert.assertNotNull("Role should not be null",desc);
  Assert.assertEquals(RoleOperation.SHOW_ROLE_GRANT,desc.getOperation());
  Assert.assertEquals(PrincipalType.USER,desc.getPrincipalType());
  Assert.assertEquals(USER,desc.getName());
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * SHOW ROLE GRANT GROUP ...
 *
 * <p>The analyzed statement should yield a SHOW_ROLE_GRANT descriptor for a
 * GROUP principal carrying the group's name.</p>
 */
@Test public void testShowRoleGrantGroup() throws Exception {
  DDLWork ddlWork=analyze("SHOW ROLE GRANT GROUP " + GROUP);
  RoleDDLDesc desc=ddlWork.getRoleDDLDesc();
  Assert.assertNotNull("Role should not be null",desc);
  Assert.assertEquals(RoleOperation.SHOW_ROLE_GRANT,desc.getOperation());
  Assert.assertEquals(PrincipalType.GROUP,desc.getPrincipalType());
  Assert.assertEquals(GROUP,desc.getName());
}
Class: org.apache.hadoop.hive.ql.parse.authorization.TestSessionUserName InternalCallVerifier EqualityVerifier
/**
 * Test if the authorization factory gets the username provided by
 * the authenticator, if SessionState is created without username
 * @throws Exception
 */
@Test public void testSessionDefaultUser() throws Exception {
SessionState ss=new SessionState(getAuthV2HiveConf());
setupDataNucleusFreeHive(ss.getConf());
SessionState.start(ss);
// With no explicit username, the factory should have recorded whatever
// the session's authenticator reports as the user name.
Assert.assertEquals("check username",ss.getAuthenticator().getUserName(),HiveAuthorizerStoringUserNameFactory.username);
}
EqualityVerifier
/**
 * Test if the authorization factory gets the username set in the SessionState constructor
 * @throws Exception
 */
@Test public void testSessionConstructorUser() throws Exception {
  final String TEST_USER="authtestuser";
  SessionState state=new SessionState(getAuthV2HiveConf(),TEST_USER);
  setupDataNucleusFreeHive(state.getConf());
  SessionState.start(state);
  // Called for its side effect only; the returned authenticator is unused.
  state.getAuthenticator();
  Assert.assertEquals("check username",TEST_USER,HiveAuthorizerStoringUserNameFactory.username);
}
Class: org.apache.hadoop.hive.ql.parse.authorization.plugin.sqlstd.TestOperation2Privilege BranchVerifier UtilityVerifier EqualityVerifier HybridVerifier
/**
 * Test that every enum constant in {@link HiveOperationType} has a matching
 * entry in the Operation2Privilege map, and (via the size check) that the
 * map holds no entries beyond those constants.
 */
@Test public void checkHiveOperationTypeMatch(){
  // Typed Set<HiveOperationType>: the raw Set in the original dropped the
  // type argument (likely lost during extraction) and forced unchecked use.
  Set<HiveOperationType> operationMapKeys=Operation2Privilege.getOperationTypes();
  for ( HiveOperationType operationType : HiveOperationType.values()) {
    if (!operationMapKeys.contains(operationType)) {
      fail("Unable to find corresponding entry in Operation2Privilege map for HiveOperationType " + operationType);
    }
  }
  // Containment of every constant plus equal sizes implies a 1:1 match.
  assertEquals("Check if Operation2Privilege, HiveOperationType have same number of instances",operationMapKeys.size(),HiveOperationType.values().length);
}
Class: org.apache.hadoop.hive.ql.parse.positive.TestTransactionStatement EqualityVerifier
/** START TRANSACTION in all its clause combinations must parse to the expected AST. */
@Test public void testTxnStart() throws ParseException {
  // Bare START TRANSACTION.
  ASTNode tree=parse("START TRANSACTION");
  Assert.assertEquals("AST doesn't match","tok_start_transaction",tree.toStringTree());
  // With an isolation level clause.
  tree=parse("START TRANSACTION ISOLATION LEVEL SNAPSHOT");
  Assert.assertEquals("AST doesn't match","(tok_start_transaction (tok_isolation_level tok_isolation_snapshot))",tree.toStringTree());
  // With an access mode clause.
  tree=parse("START TRANSACTION READ ONLY");
  Assert.assertEquals("AST doesn't match","(tok_start_transaction (tok_txn_access_mode tok_txn_read_only))",tree.toStringTree());
  // With both clauses combined.
  tree=parse("START TRANSACTION READ WRITE, ISOLATION LEVEL SNAPSHOT");
  Assert.assertEquals("AST doesn't match","(tok_start_transaction (tok_txn_access_mode tok_txn_read_write) (tok_isolation_level tok_isolation_snapshot))",tree.toStringTree());
}
EqualityVerifier
/** COMMIT and ROLLBACK, with or without the optional WORK keyword, parse identically. */
@Test public void testTxnCommitRollback() throws ParseException {
  // COMMIT with and without WORK maps to the same token.
  ASTNode tree=parse("COMMIT");
  Assert.assertEquals("AST doesn't match","tok_commit",tree.toStringTree());
  tree=parse("COMMIT WORK");
  Assert.assertEquals("AST doesn't match","tok_commit",tree.toStringTree());
  // Likewise for ROLLBACK.
  tree=parse("ROLLBACK");
  Assert.assertEquals("AST doesn't match","tok_rollback",tree.toStringTree());
  tree=parse("ROLLBACK WORK");
  Assert.assertEquals("AST doesn't match","tok_rollback",tree.toStringTree());
}
EqualityVerifier
/** SET AUTOCOMMIT parses both boolean literals into the expected AST. */
@Test public void testAutoCommit() throws ParseException {
  ASTNode tree=parse("SET AUTOCOMMIT TRUE");
  Assert.assertEquals("AST doesn't match","(tok_set_autocommit tok_true)",tree.toStringTree());
  tree=parse("SET AUTOCOMMIT FALSE");
  Assert.assertEquals("AST doesn't match","(tok_set_autocommit tok_false)",tree.toStringTree());
}
Class: org.apache.hadoop.hive.ql.plan.TestConditionalResolverCommonJoin InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
@Test public void testResolvingDriverAlias() throws Exception {
ConditionalResolverCommonJoin resolver=new ConditionalResolverCommonJoin();
HashMap> pathToAliases=new HashMap>();
pathToAliases.put("path1",new ArrayList(Arrays.asList("alias1","alias2")));
pathToAliases.put("path2",new ArrayList(Arrays.asList("alias3")));
HashMap aliasToKnownSize=new HashMap();
aliasToKnownSize.put("alias1",1024l);
aliasToKnownSize.put("alias2",2048l);
aliasToKnownSize.put("alias3",4096l);
DDLTask task1=new DDLTask();
task1.setId("alias2");
DDLTask task2=new DDLTask();
task2.setId("alias3");
HashMap,Set> taskToAliases=new LinkedHashMap,Set>();
taskToAliases.put(task1,new HashSet(Arrays.asList("alias2")));
taskToAliases.put(task2,new HashSet(Arrays.asList("alias3")));
ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx ctx=new ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx();
ctx.setPathToAliases(pathToAliases);
ctx.setTaskToAliases(taskToAliases);
ctx.setAliasToKnownSize(aliasToKnownSize);
HiveConf conf=new HiveConf();
conf.setLongVar(HiveConf.ConfVars.HIVESMALLTABLESFILESIZE,4096);
Task resolved=resolver.resolveMapJoinTask(ctx,conf);
Assert.assertEquals("alias3",resolved.getId());
conf.setLongVar(HiveConf.ConfVars.HIVESMALLTABLESFILESIZE,65536);
resolved=resolver.resolveMapJoinTask(ctx,conf);
Assert.assertEquals("alias3",resolved.getId());
conf.setLongVar(HiveConf.ConfVars.HIVESMALLTABLESFILESIZE,2048);
resolved=resolver.resolveMapJoinTask(ctx,conf);
Assert.assertNull(resolved);
}
Class: org.apache.hadoop.hive.ql.plan.TestCreateMacroDesc InternalCallVerifier EqualityVerifier
/** Every constructor argument of CreateMacroDesc must round-trip through its getter. */
@Test public void testCreateMacroDesc() throws Exception {
  CreateMacroDesc macro=new CreateMacroDesc(name,colNames,colTypes,bodyDesc);
  Assert.assertEquals(name,macro.getMacroName());
  Assert.assertEquals(bodyDesc,macro.getBody());
  Assert.assertEquals(colNames,macro.getColNames());
  Assert.assertEquals(colTypes,macro.getColTypes());
}
Class: org.apache.hadoop.hive.ql.plan.TestDropMacroDesc EqualityVerifier
/** The macro name passed to DropMacroDesc must round-trip through getMacroName(). */
@Test public void testCreateMacroDesc() throws Exception {
  DropMacroDesc dropDesc=new DropMacroDesc(name);
  Assert.assertEquals(name,dropDesc.getMacroName());
}
Class: org.apache.hadoop.hive.ql.plan.TestReadEntityDirect BranchVerifier UtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Underlying table of view should be marked as direct, as it is also accessed
 * directly in the join query
 * @throws ParseException
 */
@Test public void testSelectEntityViewDirectJoin() throws ParseException {
Driver driver=createDriver();
int ret=driver.compile("select * from v1 join t1 on (v1.i = t1.i)");
assertEquals("Checking command success",0,ret);
// Exactly two read entities: the view and its underlying table.
assertEquals(2,CheckInputReadEntityDirect.readEntities.size());
for ( ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) {
if (readEntity.getName().equals("default@t1")) {
// t1 appears directly in the join, so it must be direct even though
// it is also reachable through the view.
assertTrue("direct",readEntity.isDirect());
}
else if (readEntity.getName().equals("default@v1")) {
assertTrue("direct",readEntity.isDirect());
}
else {
fail("unexpected entity name " + readEntity.getName());
}
}
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * No views in the query so it should be a direct entity
 * @throws ParseException
 */
@Test public void testSelectEntityDirect() throws ParseException {
Driver driver=createDriver();
int ret=driver.compile("select * from t1");
assertEquals("Checking command success",0,ret);
// A single read entity -- the table itself -- read directly.
assertEquals(1,CheckInputReadEntityDirect.readEntities.size());
assertTrue("isDirect",CheckInputReadEntityDirect.readEntities.iterator().next().isDirect());
}
BranchVerifier UtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Underlying table of view should be marked as indirect
 * @throws ParseException
 */
@Test public void testSelectEntityInDirect() throws ParseException {
Driver driver=createDriver();
int ret=driver.compile("select * from v1");
assertEquals("Checking command success",0,ret);
// Two read entities: the view (direct) and its underlying table (indirect).
assertEquals(2,CheckInputReadEntityDirect.readEntities.size());
for ( ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) {
if (readEntity.getName().equals("default@t1")) {
// t1 is only reachable through the view, so it must be indirect.
assertFalse("not direct",readEntity.isDirect());
}
else if (readEntity.getName().equals("default@v1")) {
assertTrue("direct",readEntity.isDirect());
}
else {
fail("unexpected entity name " + readEntity.getName());
}
}
}
BranchVerifier UtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Underlying table of view should be marked as direct, as it is also accessed
 * directly in the union-all query
 * @throws ParseException
 */
@Test public void testSelectEntityViewDirectUnion() throws ParseException {
Driver driver=createDriver();
int ret=driver.compile("select * from ( select * from v1 union all select * from t1) uv1t1");
assertEquals("Checking command success",0,ret);
// Two read entities: the view and the table, both direct here.
assertEquals(2,CheckInputReadEntityDirect.readEntities.size());
for ( ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) {
if (readEntity.getName().equals("default@t1")) {
// t1 appears as its own branch of the union, so it must be direct.
assertTrue("direct",readEntity.isDirect());
}
else if (readEntity.getName().equals("default@v1")) {
assertTrue("direct",readEntity.isDirect());
}
else {
fail("unexpected entity name " + readEntity.getName());
}
}
}
BranchVerifier UtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Underlying table of view should be marked as indirect. Query with join of views and aliases
 * @throws ParseException
 */
@Test public void testSelectEntityInDirectJoinAlias() throws ParseException {
Driver driver=createDriver();
int ret=driver.compile("select * from v1 as a join v1 as b on (a.i = b.i)");
assertEquals("Checking command success",0,ret);
// The self-joined view collapses to two entities: v1 (direct) and t1 (indirect).
assertEquals(2,CheckInputReadEntityDirect.readEntities.size());
for ( ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) {
if (readEntity.getName().equals("default@t1")) {
// t1 is only reachable through the aliased view, so it must be indirect.
assertFalse("not direct",readEntity.isDirect());
}
else if (readEntity.getName().equals("default@v1")) {
assertTrue("direct",readEntity.isDirect());
}
else {
fail("unexpected entity name " + readEntity.getName());
}
}
}
Class: org.apache.hadoop.hive.ql.plan.TestTezWork APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Removing a node from the work graph must detach it from its children and
 * shrink the overall node, root and leaf counts by one.
 */
@Test public void testRemove() throws Exception {
  BaseWork parent=nodes.get(0);
  // Java-style array declaration (the original used C-style "BaseWork children[]").
  BaseWork[] children={nodes.get(1),nodes.get(2)};
  TezEdgeProperty edgeProp=new TezEdgeProperty(EdgeType.SIMPLE_EDGE);
  work.connect(parent,children[0],edgeProp);
  work.connect(parent,children[1],edgeProp);
  work.remove(parent);
  // JUnit convention: expected value first, actual second.
  Assert.assertEquals(0,work.getParents(children[0]).size());
  Assert.assertEquals(0,work.getParents(children[1]).size());
  Assert.assertEquals(nodes.size() - 1,work.getAllWork().size());
  Assert.assertEquals(nodes.size() - 1,work.getRoots().size());
  Assert.assertEquals(nodes.size() - 1,work.getLeaves().size());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/** Connect node 0 -> node 1 with a BROADCAST edge and verify the graph bookkeeping. */
@Test public void testBroadcastConnect() throws Exception {
BaseWork parent=nodes.get(0);
BaseWork child=nodes.get(1);
TezEdgeProperty edgeProp=new TezEdgeProperty(EdgeType.BROADCAST_EDGE);
work.connect(parent,child,edgeProp);
// Parent/child lists must reflect the single new edge.
Assert.assertEquals(work.getParents(child).size(),1);
Assert.assertEquals(work.getChildren(parent).size(),1);
Assert.assertEquals(work.getChildren(parent).get(0),child);
Assert.assertEquals(work.getParents(child).get(0),parent);
// Root/leaf sets: the parent is a root but no longer a leaf; vice versa for the child.
Assert.assertTrue(work.getRoots().contains(parent) && !work.getRoots().contains(child));
Assert.assertTrue(!work.getLeaves().contains(parent) && work.getLeaves().contains(child));
for ( BaseWork w : nodes) {
if (w == parent || w == child) {
continue;
}
// Every other node stays disconnected.
Assert.assertEquals(work.getParents(w).size(),0);
Assert.assertEquals(work.getChildren(w).size(),0);
}
// The edge must retain its BROADCAST type.
Assert.assertEquals(work.getEdgeProperty(parent,child).getEdgeType(),EdgeType.BROADCAST_EDGE);
}
IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * With edges 4->3->2->1->0, getAllWork must return the nodes in reverse
 * index order (presumably a topological ordering of the graph).
 */
@Test public void testGetAllWork() throws Exception {
  TezEdgeProperty edgeProp=new TezEdgeProperty(EdgeType.SIMPLE_EDGE);
  // Build a chain: each node i becomes the parent of node i-1.
  for (int i=4; i > 0; --i) {
    work.connect(nodes.get(i),nodes.get(i - 1),edgeProp);
  }
  // Typed List<BaseWork>: the original raw List dropped the element type.
  List<BaseWork> sorted=work.getAllWork();
  for (int i=0; i < 5; ++i) {
    Assert.assertEquals(nodes.get(4 - i),sorted.get(i));
  }
}
InternalCallVerifier EqualityVerifier
/**
 * If a BaseWork clears the MR jar property while configuring the JobConf,
 * configureJobConfAndExtractJars must leave the original value in place.
 */
@Test public void testConfigureJarsWithNull() throws Exception {
  final JobConf conf=new JobConf();
  conf.set(MR_JAR_PROPERTY,"file:///tmp/foo1.jar");
  BaseWork baseWork=Mockito.mock(BaseWork.class);
  // Answer<Void> (the raw Answer in the original dropped the type argument):
  // simulate a vertex that unsets the jar property during configuration.
  Mockito.doAnswer(new Answer<Void>(){
    @Override public Void answer( InvocationOnMock invocation) throws Throwable {
      conf.unset(MR_JAR_PROPERTY);
      return null;
    }
  }
  ).when(baseWork).configureJobConf(conf);
  work.add(baseWork);
  work.configureJobConfAndExtractJars(conf);
  Assert.assertEquals("file:///tmp/foo1.jar",conf.get(MR_JAR_PROPERTY));
}
InternalCallVerifier EqualityVerifier
/**
 * Starting with the MR jar property unset, jars contributed by a BaseWork
 * during configuration must end up joined into the property.
 */
@Test public void testConfigureJarsStartingWithNull() throws Exception {
  final JobConf conf=new JobConf();
  conf.unset(MR_JAR_PROPERTY);
  BaseWork baseWork=Mockito.mock(BaseWork.class);
  // Answer<Void> (the raw Answer in the original dropped the type argument):
  // simulate a vertex that contributes two jars.
  Mockito.doAnswer(new Answer<Void>(){
    @Override public Void answer( InvocationOnMock invocation) throws Throwable {
      conf.setStrings(MR_JAR_PROPERTY,"file:///tmp/foo1.jar","file:///tmp/foo2.jar");
      return null;
    }
  }
  ).when(baseWork).configureJobConf(conf);
  work.add(baseWork);
  work.configureJobConfAndExtractJars(conf);
  Assert.assertEquals("file:///tmp/foo1.jar,file:///tmp/foo2.jar",conf.get(MR_JAR_PROPERTY));
}
InternalCallVerifier EqualityVerifier
/** With no edges, every added node is simultaneously a root and a leaf. */
@Test public void testAdd() throws Exception {
  Assert.assertEquals(work.getAllWork().size(),nodes.size());
  Assert.assertEquals(work.getRoots().size(),nodes.size());
  Assert.assertEquals(work.getLeaves().size(),nodes.size());
  for ( BaseWork node : nodes) {
    // No node has parents or children yet.
    Assert.assertEquals(work.getParents(node).size(),0);
    Assert.assertEquals(work.getChildren(node).size(),0);
  }
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/** Connect node 0 -> node 1 with a SIMPLE edge and verify the graph bookkeeping. */
@Test public void testConnect() throws Exception {
BaseWork parent=nodes.get(0);
BaseWork child=nodes.get(1);
TezEdgeProperty edgeProp=new TezEdgeProperty(EdgeType.SIMPLE_EDGE);
work.connect(parent,child,edgeProp);
// Parent/child lists must reflect the single new edge.
Assert.assertEquals(work.getParents(child).size(),1);
Assert.assertEquals(work.getChildren(parent).size(),1);
Assert.assertEquals(work.getChildren(parent).get(0),child);
Assert.assertEquals(work.getParents(child).get(0),parent);
// Root/leaf sets: the parent is a root but no longer a leaf; vice versa for the child.
Assert.assertTrue(work.getRoots().contains(parent) && !work.getRoots().contains(child));
Assert.assertTrue(!work.getLeaves().contains(parent) && work.getLeaves().contains(child));
for ( BaseWork w : nodes) {
if (w == parent || w == child) {
continue;
}
// Every other node stays disconnected.
Assert.assertEquals(work.getParents(w).size(),0);
Assert.assertEquals(work.getChildren(w).size(),0);
}
// The edge must retain its SIMPLE type.
Assert.assertEquals(work.getEdgeProperty(parent,child).getEdgeType(),EdgeType.SIMPLE_EDGE);
}
InternalCallVerifier EqualityVerifier
/**
 * A BaseWork that contributes no extra jars must leave the MR jar property
 * exactly as it was.
 */
@Test public void testConfigureJarsNoExtraJars() throws Exception {
  final JobConf jobConf=new JobConf();
  jobConf.set(MR_JAR_PROPERTY,"file:///tmp/foo1.jar");
  // Default mock: configureJobConf does nothing.
  BaseWork mockWork=Mockito.mock(BaseWork.class);
  work.add(mockWork);
  work.configureJobConfAndExtractJars(jobConf);
  Assert.assertEquals("file:///tmp/foo1.jar",jobConf.get(MR_JAR_PROPERTY));
}
InternalCallVerifier EqualityVerifier
/**
 * Jars contributed by a BaseWork during configuration are merged with the
 * value already present in the MR jar property, original value first.
 */
@Test public void testConfigureJars() throws Exception {
  final JobConf conf=new JobConf();
  conf.set(MR_JAR_PROPERTY,"file:///tmp/foo1.jar");
  BaseWork baseWork=Mockito.mock(BaseWork.class);
  // Answer<Void> (the raw Answer in the original dropped the type argument):
  // simulate a vertex that overwrites the property with its own jar.
  Mockito.doAnswer(new Answer<Void>(){
    @Override public Void answer( InvocationOnMock invocation) throws Throwable {
      conf.set(MR_JAR_PROPERTY,"file:///tmp/foo2.jar");
      return null;
    }
  }
  ).when(baseWork).configureJobConf(conf);
  work.add(baseWork);
  work.configureJobConfAndExtractJars(conf);
  Assert.assertEquals("file:///tmp/foo1.jar,file:///tmp/foo2.jar",conf.get(MR_JAR_PROPERTY));
}
Class: org.apache.hadoop.hive.ql.plan.TestViewEntity BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verify that the parent entities are captured correctly for union views
 * @throws Exception
 */
@Test public void testUnionView() throws Exception {
String prefix="tunionview" + NAME_PREFIX;
final String tab1=prefix + "t1";
final String tab2=prefix + "t2";
final String view1=prefix + "v1";
// Create two base tables and a view that UNION ALLs them.
int ret=driver.run("create table " + tab1 + "(id int)").getResponseCode();
assertEquals("Checking command success",0,ret);
ret=driver.run("create table " + tab2 + "(id int)").getResponseCode();
assertEquals("Checking command success",0,ret);
ret=driver.run("create view " + view1 + " as select t.id from "+ "(select "+ tab1+ ".id from "+ tab1+ " union all select "+ tab2+ ".id from "+ tab2+ ") as t").getResponseCode();
assertEquals("Checking command success",0,ret);
driver.compile("select * from " + view1);
// The view is the direct input; both underlying tables are indirect
// inputs whose recorded parent entity is the view.
assertEquals("default@" + view1,CheckInputReadEntity.readEntities[0].getName());
assertEquals("default@" + tab1,CheckInputReadEntity.readEntities[1].getName());
assertFalse("Table is not direct input",CheckInputReadEntity.readEntities[1].isDirect());
assertEquals("default@" + view1,CheckInputReadEntity.readEntities[1].getParents().iterator().next().getName());
assertEquals("default@" + tab2,CheckInputReadEntity.readEntities[2].getName());
assertFalse("Table is not direct input",CheckInputReadEntity.readEntities[2].isDirect());
assertEquals("default@" + view1,CheckInputReadEntity.readEntities[2].getParents().iterator().next().getName());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verify that the parent entities are captured correctly for view in subquery
 * @throws Exception
 */
@Test public void testViewInSubQuery() throws Exception {
String prefix="tvsubquery" + NAME_PREFIX;
final String tab1=prefix + "t";
final String view1=prefix + "v";
// Create a base table and a view selecting from it.
int ret=driver.run("create table " + tab1 + "(id int)").getResponseCode();
assertEquals("Checking command success",0,ret);
ret=driver.run("create view " + view1 + " as select * from "+ tab1).getResponseCode();
assertEquals("Checking command success",0,ret);
driver.compile("select * from " + view1);
// The view is the direct input; the underlying table is only an indirect one.
assertEquals("default@" + view1,CheckInputReadEntity.readEntities[0].getName());
assertEquals("default@" + tab1,CheckInputReadEntity.readEntities[1].getName());
assertFalse("Table is not direct input",CheckInputReadEntity.readEntities[1].isDirect());
}
Class: org.apache.hadoop.hive.ql.processors.TestCommandProcessorFactory APIUtilityVerifier UtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Every HiveCommand must resolve to a processor (in both upper and lower
 * case), and with an empty security whitelist every lookup must instead
 * fail with an SQLException.
 */
@Test public void testAvailableCommands() throws Exception {
enableTestOnlyCmd(conf);
SessionState.start(conf);
// With the default whitelist, each command resolves regardless of case.
for ( HiveCommand command : HiveCommand.values()) {
String cmd=command.name();
String cmdInLowerCase=cmd.toLowerCase();
Assert.assertNotNull("Cmd " + cmd + " not return null",CommandProcessorFactory.getForHiveCommandInternal(new String[]{cmd},conf,command.isOnlyForTesting()));
Assert.assertNotNull("Cmd " + cmd + " not return null",CommandProcessorFactory.getForHiveCommandInternal(new String[]{cmdInLowerCase},conf,command.isOnlyForTesting()));
}
// Empty the whitelist: now every command lookup must be rejected with
// SQLState 42000 and the "Insufficient privileges" message.
conf.set(HiveConf.ConfVars.HIVE_SECURITY_COMMAND_WHITELIST.toString(),"");
for ( HiveCommand command : HiveCommand.values()) {
String cmd=command.name();
try {
CommandProcessorFactory.getForHiveCommandInternal(new String[]{cmd},conf,command.isOnlyForTesting());
Assert.fail("Expected SQLException for " + cmd + " as available commands is empty");
}
catch ( SQLException e) {
Assert.assertEquals("Insufficient privileges to execute " + cmd,e.getMessage());
Assert.assertEquals("42000",e.getSQLState());
}
}
}
Class: org.apache.hadoop.hive.ql.processors.TestCompileProcessor BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * CompileProcessor statement syntax: backtick-quoted code AS &lt;lang&gt;
 * NAMED &lt;file&gt; is accepted; empty or malformed input is rejected.
 */
@Test public void testSyntax() throws Exception {
CompileProcessor cp=new CompileProcessor();
// Well-formed statement: code, language and target name are all captured.
Assert.assertEquals(0,cp.run("` public class x { \n }` AS GROOVY NAMED x.groovy").getResponseCode());
Assert.assertEquals("GROOVY",cp.getLang());
Assert.assertEquals(" public class x { \n }",cp.getCode());
Assert.assertEquals("x.groovy",cp.getNamed());
// Malformed statements must be rejected with a non-zero response code.
Assert.assertEquals(1,cp.run("").getResponseCode());
Assert.assertEquals(1,cp.run("bla bla ").getResponseCode());
CompileProcessor cp2=new CompileProcessor();
CommandProcessorResponse response=cp2.run("` import org.apache.hadoop.hive.ql.exec.UDF \n public class x { \n }` AS GROOVY NAMED x.groovy");
Assert.assertEquals(0,response.getResponseCode());
// NOTE(review): the produced file path appears to be carried in the
// response's error-message field -- confirm against CompileProcessor.
File f=new File(response.getErrorMessage());
Assert.assertTrue(f.exists());
f.delete();
}
Class: org.apache.hadoop.hive.ql.security.FolderPermissionBase BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * CTAS into a non-default database: the created table directory and all of
 * its files must carry the permission stamped on the database directory.
 */
@Test public void testCtas() throws Exception {
String testDb="ctasdb";
String tableName="createtable";
// Create the database and stamp the test permission on its directory.
CommandProcessorResponse ret=driver.run("CREATE DATABASE " + testDb);
Assert.assertEquals(0,ret.getResponseCode());
assertExistence(warehouseDir + "/" + testDb+ ".db");
setPermission(warehouseDir + "/" + testDb+ ".db");
verifyPermission(warehouseDir + "/" + testDb+ ".db");
ret=driver.run("USE " + testDb);
Assert.assertEquals(0,ret.getResponseCode());
// CTAS: the new table dir and every file beneath it must show the permission.
ret=driver.run("create table " + tableName + " as select key,value from default.mysrc");
Assert.assertEquals(0,ret.getResponseCode());
assertExistence(warehouseDir + "/" + testDb+ ".db/"+ tableName);
verifyPermission(warehouseDir + "/" + testDb+ ".db/"+ tableName);
Assert.assertTrue(listStatus(warehouseDir + "/" + testDb+ ".db/"+ tableName).size() > 0);
for ( String child : listStatus(warehouseDir + "/" + testDb+ ".db/"+ tableName)) {
verifyPermission(child);
}
// Switch back to the default database for subsequent tests.
ret=driver.run("USE default");
Assert.assertEquals(0,ret.getResponseCode());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * LOAD DATA permission inheritance, exercised in four phases: plain load,
 * overwrite load, partitioned load, and partitioned overwrite. Loaded files
 * must always show the permission currently set on the target directory.
 */
@Test public void testLoad() throws Exception {
String tableName="load";
String location="/hdfsPath";
fs.copyFromLocalFile(dataFilePath,new Path(location));
// Phase 1: load into an unpartitioned table; files inherit the table
// directory's permission.
CommandProcessorResponse ret=driver.run("CREATE TABLE " + tableName + " (key string, value string)");
Assert.assertEquals(0,ret.getResponseCode());
String tableLoc=warehouseDir + "/" + tableName;
assertExistence(warehouseDir + "/" + tableName);
setPermission(warehouseDir + "/" + tableName);
ret=driver.run("load data inpath '" + location + "' into table "+ tableName);
Assert.assertEquals(0,ret.getResponseCode());
Assert.assertTrue(listStatus(tableLoc).size() > 0);
for ( String child : listStatus(tableLoc)) {
verifyPermission(child);
}
// Phase 2: switch to the alternate permission (index 1) and overwrite;
// replaced files must carry the new permission.
setPermission(warehouseDir + "/" + tableName,1);
for ( String child : listStatus(tableLoc)) {
setPermission(child,1);
}
fs.copyFromLocalFile(dataFilePath,new Path(location));
ret=driver.run("load data inpath '" + location + "' overwrite into table "+ tableName);
Assert.assertEquals(0,ret.getResponseCode());
Assert.assertTrue(listStatus(tableLoc).size() > 0);
for ( String child : listStatus(tableLoc)) {
verifyPermission(child,1);
}
// Phase 3: repeat for a partitioned table -- load into a fresh partition.
tableName="loadpartition";
ret=driver.run("CREATE TABLE " + tableName + " (key string, value string) partitioned by (part1 int, part2 int)");
Assert.assertEquals(0,ret.getResponseCode());
tableLoc=warehouseDir + "/" + tableName;
assertExistence(tableLoc);
setPermission(tableLoc);
fs.copyFromLocalFile(dataFilePath,new Path(location));
ret=driver.run("LOAD DATA INPATH '" + location + "' INTO TABLE "+ tableName+ " PARTITION (part1='1',part2='1')");
Assert.assertEquals(0,ret.getResponseCode());
String partLoc=warehouseDir + "/" + tableName+ "/part1=1/part2=1";
Assert.assertTrue(listStatus(partLoc).size() > 0);
for ( String child : listStatus(partLoc)) {
verifyPermission(child);
}
// Phase 4: overwrite the partition with the alternate permission in place.
setPermission(tableLoc,1);
setPermission(partLoc,1);
Assert.assertTrue(listStatus(partLoc).size() > 0);
for ( String child : listStatus(partLoc)) {
setPermission(child,1);
}
fs.copyFromLocalFile(dataFilePath,new Path(location));
ret=driver.run("LOAD DATA INPATH '" + location + "' OVERWRITE INTO TABLE "+ tableName+ " PARTITION (part1='1',part2='1')");
Assert.assertEquals(0,ret.getResponseCode());
Assert.assertTrue(listStatus(tableLoc).size() > 0);
for ( String child : listStatus(partLoc)) {
verifyPermission(child,1);
}
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * INSERT into a single static partition: the partition directory and its
 * files must carry the permission set on the table directory, for both a
 * fresh insert and an insert-overwrite after a permission change.
 */
@Test public void testInsertStaticSinglePartition() throws Exception {
String tableName="singlestaticpart";
CommandProcessorResponse ret=driver.run("CREATE TABLE " + tableName + " (key string, value string) partitioned by (part1 string)");
Assert.assertEquals(0,ret.getResponseCode());
assertExistence(warehouseDir + "/" + tableName);
setPermission(warehouseDir + "/" + tableName);
// Insert with the default test permission (index 0) on the table dir.
ret=driver.run("insert into table " + tableName + " partition(part1='1') select key,value from mysrc where part1='1' and part2='1'");
Assert.assertEquals(0,ret.getResponseCode());
verifyPermission(warehouseDir + "/" + tableName);
verifyPermission(warehouseDir + "/" + tableName+ "/part1=1");
Assert.assertTrue(listStatus(warehouseDir + "/" + tableName+ "/part1=1").size() > 0);
for ( String child : listStatus(warehouseDir + "/" + tableName+ "/part1=1")) {
verifyPermission(child);
}
// Switch to the alternate permission (index 1) and overwrite; the
// rewritten partition files must carry the new permission.
setPermission(warehouseDir + "/" + tableName,1);
setPermission(warehouseDir + "/" + tableName+ "/part1=1",1);
ret=driver.run("insert overwrite table " + tableName + " partition(part1='1') select key,value from mysrc where part1='1' and part2='1'");
Assert.assertEquals(0,ret.getResponseCode());
verifyPermission(warehouseDir + "/" + tableName,1);
verifyPermission(warehouseDir + "/" + tableName+ "/part1=1",1);
Assert.assertTrue(listStatus(warehouseDir + "/" + tableName+ "/part1=1").size() > 0);
for ( String child : listStatus(warehouseDir + "/" + tableName+ "/part1=1")) {
verifyPermission(child,1);
}
}
InternalCallVerifier EqualityVerifier
/**
 * Tests the permission to the table doesn't change after the truncation
 * (both full-table and single-partition TRUNCATE).
 * @throws Exception
 */
@Test public void testTruncateTable() throws Exception {
String tableName="truncatetable";
String partition=warehouseDir + "/" + tableName+ "/part1=1";
CommandProcessorResponse ret=driver.run("CREATE TABLE " + tableName + " (key STRING, value STRING) PARTITIONED BY (part1 INT)");
Assert.assertEquals(0,ret.getResponseCode());
setPermission(warehouseDir + "/" + tableName);
// Populate one partition; it should inherit the table permission.
ret=driver.run("insert into table " + tableName + " partition(part1='1') select key,value from mysrc where part1='1' and part2='1'");
Assert.assertEquals(0,ret.getResponseCode());
assertExistence(warehouseDir + "/" + tableName);
verifyPermission(warehouseDir + "/" + tableName);
verifyPermission(partition);
// Full-table truncate must not disturb the table directory permission.
ret=driver.run("TRUNCATE TABLE " + tableName);
Assert.assertEquals(0,ret.getResponseCode());
assertExistence(warehouseDir + "/" + tableName);
verifyPermission(warehouseDir + "/" + tableName);
// Re-populate and verify again before the partition-level truncate.
ret=driver.run("insert into table " + tableName + " partition(part1='1') select key,value from mysrc where part1='1' and part2='1'");
Assert.assertEquals(0,ret.getResponseCode());
verifyPermission(warehouseDir + "/" + tableName);
assertExistence(partition);
verifyPermission(partition);
// Partition-level truncate must keep the partition directory and its permission.
ret=driver.run("TRUNCATE TABLE " + tableName + " partition(part1='1')");
Assert.assertEquals(0,ret.getResponseCode());
assertExistence(partition);
verifyPermission(partition);
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * INSERT into a two-level static partition: both partition directory levels
 * and the files beneath them must carry the table directory's permission,
 * for both a fresh insert and an insert-overwrite after a permission change.
 */
@Test public void testInsertStaticDualPartition() throws Exception {
String tableName="dualstaticpart";
CommandProcessorResponse ret=driver.run("CREATE TABLE " + tableName + " (key string, value string) partitioned by (part1 string, part2 string)");
Assert.assertEquals(0,ret.getResponseCode());
assertExistence(warehouseDir + "/" + tableName);
setPermission(warehouseDir + "/" + tableName);
// Insert with the default test permission (index 0).
ret=driver.run("insert into table " + tableName + " partition(part1='1', part2='1') select key,value from mysrc where part1='1' and part2='1'");
Assert.assertEquals(0,ret.getResponseCode());
verifyPermission(warehouseDir + "/" + tableName);
verifyPermission(warehouseDir + "/" + tableName+ "/part1=1");
verifyPermission(warehouseDir + "/" + tableName+ "/part1=1/part2=1");
Assert.assertTrue(listStatus(warehouseDir + "/" + tableName+ "/part1=1/part2=1").size() > 0);
for ( String child : listStatus(warehouseDir + "/" + tableName+ "/part1=1/part2=1")) {
verifyPermission(child);
}
// Switch every level to the alternate permission (index 1) and overwrite.
setPermission(warehouseDir + "/" + tableName,1);
setPermission(warehouseDir + "/" + tableName+ "/part1=1",1);
setPermission(warehouseDir + "/" + tableName+ "/part1=1/part2=1",1);
ret=driver.run("insert overwrite table " + tableName + " partition(part1='1', part2='1') select key,value from mysrc where part1='1' and part2='1'");
Assert.assertEquals(0,ret.getResponseCode());
verifyPermission(warehouseDir + "/" + tableName,1);
verifyPermission(warehouseDir + "/" + tableName+ "/part1=1",1);
verifyPermission(warehouseDir + "/" + tableName+ "/part1=1/part2=1",1);
Assert.assertTrue(listStatus(warehouseDir + "/" + tableName+ "/part1=1/part2=1").size() > 0);
for ( String child : listStatus(warehouseDir + "/" + tableName+ "/part1=1/part2=1")) {
verifyPermission(child,1);
}
}
InternalCallVerifier EqualityVerifier
/**
 * Dynamic two-level partition insert: partition values come from the SELECT,
 * so per-partition permission checks are delegated to
 * verifyDualPartitionTable/setDualPartitionTable (defined outside this chunk).
 * Set 0 is checked after INSERT INTO, set 1 after INSERT OVERWRITE.
 */
@Test public void testInsertDualDynamicPartitions() throws Exception {
String tableName="dualdynamicpart";
CommandProcessorResponse ret=driver.run("CREATE TABLE " + tableName + " (key string, value string) partitioned by (part1 string, part2 string)");
Assert.assertEquals(0,ret.getResponseCode());
assertExistence(warehouseDir + "/" + tableName);
setPermission(warehouseDir + "/" + tableName,0);
// Dynamic partitioning: part1/part2 are selected, not literal.
ret=driver.run("insert into table " + tableName + " partition (part1,part2) select key,value,part1,part2 from mysrc");
Assert.assertEquals(0,ret.getResponseCode());
verifyDualPartitionTable(warehouseDir + "/" + tableName,0);
// Re-tag everything with permission set 1 and overwrite; new files must follow.
setDualPartitionTable(warehouseDir + "/" + tableName,1);
ret=driver.run("insert overwrite table " + tableName + " partition (part1,part2) select key,value,part1,part2 from mysrc");
Assert.assertEquals(0,ret.getResponseCode());
verifyDualPartitionTable(warehouseDir + "/" + tableName,1);
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * CREATE DATABASE under a permission-tagged warehouse directory: the .db
 * directory, a table created inside it, and files written by INSERT must all
 * inherit the warehouse permission. The test ends by wiping and re-creating
 * the warehouse directory and re-building the source data table so later
 * tests start from a clean state.
 */
@Test public void testCreateDb() throws Exception {
String testDb="mydb";
String tableName="createtable";
setPermission(warehouseDir.toString());
verifyPermission(warehouseDir.toString());
CommandProcessorResponse ret=driver.run("CREATE DATABASE " + testDb);
Assert.assertEquals(0,ret.getResponseCode());
// The database directory inherits the warehouse permission.
assertExistence(warehouseDir + "/" + testDb+ ".db");
verifyPermission(warehouseDir + "/" + testDb+ ".db");
ret=driver.run("USE " + testDb);
Assert.assertEquals(0,ret.getResponseCode());
ret=driver.run("CREATE TABLE " + tableName + " (key string, value string)");
Assert.assertEquals(0,ret.getResponseCode());
verifyPermission(warehouseDir + "/" + testDb+ ".db/"+ tableName);
ret=driver.run("insert into table " + tableName + " select key,value from default.mysrc");
Assert.assertEquals(0,ret.getResponseCode());
assertExistence(warehouseDir + "/" + testDb+ ".db/"+ tableName);
verifyPermission(warehouseDir + "/" + testDb+ ".db/"+ tableName);
// Every data file written by the insert also inherits the permission.
Assert.assertTrue(listStatus(warehouseDir + "/" + testDb+ ".db/"+ tableName).size() > 0);
for ( String child : listStatus(warehouseDir + "/" + testDb+ ".db/"+ tableName)) {
verifyPermission(child);
}
ret=driver.run("USE default");
Assert.assertEquals(0,ret.getResponseCode());
// Cleanup: reset the warehouse directory and rebuild the shared source table.
fs.delete(warehouseDir,true);
fs.mkdirs(warehouseDir);
Assert.assertEquals(listStatus(warehouseDir.toString()).size(),0);
setupDataTable();
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * LOAD DATA LOCAL for a plain table and for a static partition: loaded files
 * must inherit the permission of the table / partition directory, both for
 * plain loads (default permission set) and for OVERWRITE loads (set 1).
 */
@Test public void testLoadLocal() throws Exception {
  String tableName = "loadlocal";
  CommandProcessorResponse ret = driver.run("CREATE TABLE " + tableName + " (key string, value string)");
  Assert.assertEquals(0, ret.getResponseCode());
  String tableLoc = warehouseDir + "/" + tableName;
  assertExistence(warehouseDir + "/" + tableName);
  setPermission(warehouseDir + "/" + tableName);
  // Plain load: files land directly under the table dir with the default set.
  ret = driver.run("load data local inpath '" + dataFilePath + "' into table " + tableName);
  Assert.assertEquals(0, ret.getResponseCode());
  Assert.assertTrue(listStatus(tableLoc).size() > 0);
  for (String child : listStatus(tableLoc)) {
    verifyPermission(child);
  }
  // Re-tag dir and existing files with permission set 1, then overwrite-load.
  setPermission(warehouseDir + "/" + tableName, 1);
  for (String child : listStatus(tableLoc)) {
    setPermission(child, 1);
  }
  ret = driver.run("load data local inpath '" + dataFilePath + "' overwrite into table " + tableName);
  Assert.assertEquals(0, ret.getResponseCode());
  Assert.assertTrue(listStatus(tableLoc).size() > 0);
  for (String child : listStatus(tableLoc)) {
    verifyPermission(child, 1);
  }
  // Partitioned variant: load into a static partition.
  tableName = "loadlocalpartition";
  ret = driver.run("CREATE TABLE " + tableName + " (key string, value string) partitioned by (part1 int, part2 int)");
  Assert.assertEquals(0, ret.getResponseCode());
  tableLoc = warehouseDir + "/" + tableName;
  assertExistence(tableLoc);
  setPermission(tableLoc);
  ret = driver.run("LOAD DATA LOCAL INPATH '" + dataFilePath + "' INTO TABLE " + tableName + " PARTITION (part1='1',part2='1')");
  Assert.assertEquals(0, ret.getResponseCode());
  String partLoc = warehouseDir + "/" + tableName + "/part1=1/part2=1";
  Assert.assertTrue(listStatus(partLoc).size() > 0);
  for (String child : listStatus(partLoc)) {
    verifyPermission(child);
  }
  setPermission(tableLoc, 1);
  setPermission(partLoc, 1);
  for (String child : listStatus(partLoc)) {
    setPermission(child, 1);
  }
  ret = driver.run("LOAD DATA LOCAL INPATH '" + dataFilePath + "' OVERWRITE INTO TABLE " + tableName + " PARTITION (part1='1',part2='1')");
  Assert.assertEquals(0, ret.getResponseCode());
  // BUGFIX: guard the partition directory that the loop below inspects.
  // The original asserted listStatus(tableLoc) here, which is trivially
  // non-empty (it contains the part1=1 subdirectory) and does not protect
  // the child-permission loop from iterating an empty partition.
  Assert.assertTrue(listStatus(partLoc).size() > 0);
  for (String child : listStatus(partLoc)) {
    verifyPermission(child, 1);
  }
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Files produced by INSERT INTO / INSERT OVERWRITE on a non-partitioned
 * table must inherit the permission applied to the table directory.
 */
@Test public void testInsertNonPartTable() throws Exception {
  String tableName = "nonpart";
  String tableLoc = warehouseDir + "/" + tableName;
  CommandProcessorResponse resp = driver.run("CREATE TABLE " + tableName + " (key string, value string)");
  Assert.assertEquals(0, resp.getResponseCode());
  assertExistence(tableLoc);
  setPermission(tableLoc);
  // Round 1: INSERT INTO under the default permission set.
  resp = driver.run("insert into table " + tableName + " select key,value from mysrc");
  Assert.assertEquals(0, resp.getResponseCode());
  verifyPermission(tableLoc);
  Assert.assertTrue(listStatus(tableLoc).size() > 0);
  for (String entry : listStatus(tableLoc)) {
    verifyPermission(entry);
  }
  // Round 2: INSERT OVERWRITE under permission set 1.
  setPermission(tableLoc, 1);
  resp = driver.run("insert overwrite table " + tableName + " select key,value from mysrc");
  Assert.assertEquals(0, resp.getResponseCode());
  verifyPermission(tableLoc, 1);
  Assert.assertTrue(listStatus(tableLoc).size() > 0);
  for (String entry : listStatus(tableLoc)) {
    verifyPermission(entry, 1);
  }
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * A table created inside a permission-tagged database directory, and the
 * files written into it by INSERT, must inherit the database directory's
 * permission.
 */
@Test public void testCreateTable() throws Exception {
  String testDb = "mydb2";
  String tableName = "createtable";
  String dbLoc = warehouseDir + "/" + testDb + ".db";
  String tableLoc = dbLoc + "/" + tableName;
  CommandProcessorResponse resp = driver.run("CREATE DATABASE " + testDb);
  Assert.assertEquals(0, resp.getResponseCode());
  assertExistence(dbLoc);
  setPermission(dbLoc);
  verifyPermission(dbLoc);
  resp = driver.run("USE " + testDb);
  Assert.assertEquals(0, resp.getResponseCode());
  resp = driver.run("CREATE TABLE " + tableName + " (key string, value string)");
  Assert.assertEquals(0, resp.getResponseCode());
  verifyPermission(tableLoc);
  resp = driver.run("insert into table " + tableName + " select key,value from default.mysrc");
  Assert.assertEquals(0, resp.getResponseCode());
  assertExistence(tableLoc);
  verifyPermission(tableLoc);
  // Every data file written by the insert also carries the permission.
  Assert.assertTrue(listStatus(tableLoc).size() > 0);
  for (String entry : listStatus(tableLoc)) {
    verifyPermission(entry);
  }
  resp = driver.run("USE default");
  Assert.assertEquals(0, resp.getResponseCode());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * ALTER TABLE partition operations and permission inheritance:
 * - RENAME PARTITION: the renamed partition tree keeps the table's current
 *   permission (set 1 here).
 * - EXCHANGE PARTITION: intermediate directories newly created in the
 *   destination table carry the destination's permission (set 0), while the
 *   moved leaf directory keeps the source's (set 1).
 */
@Test public void testPartition() throws Exception {
String tableName="alterpart";
CommandProcessorResponse ret=driver.run("CREATE TABLE " + tableName + " (key string, value string) partitioned by (part1 int, part2 int, part3 int)");
Assert.assertEquals(0,ret.getResponseCode());
assertExistence(warehouseDir + "/" + tableName);
setPermission(warehouseDir + "/" + tableName);
ret=driver.run("insert into table " + tableName + " partition(part1='1',part2='1',part3='1') select key,value from mysrc");
Assert.assertEquals(0,ret.getResponseCode());
assertExistence(warehouseDir + "/" + tableName);
// Switch the table dir to permission set 1 before renaming the partition.
setPermission(warehouseDir + "/" + tableName,1);
ret=driver.run("alter table " + tableName + " partition (part1='1',part2='1',part3='1') rename to partition (part1='2',part2='2',part3='2')");
Assert.assertEquals(0,ret.getResponseCode());
// The renamed partition tree (all three levels and the data files) follows set 1.
verifyPermission(warehouseDir + "/" + tableName+ "/part1=2",1);
verifyPermission(warehouseDir + "/" + tableName+ "/part1=2/part2=2",1);
verifyPermission(warehouseDir + "/" + tableName+ "/part1=2/part2=2/part3=2",1);
Assert.assertTrue(listStatus(warehouseDir + "/" + tableName+ "/part1=2/part2=2/part3=2").size() > 0);
for ( String child : listStatus(warehouseDir + "/" + tableName+ "/part1=2/part2=2/part3=2")) {
verifyPermission(child,1);
}
// Exchange the partition into a second table tagged with permission set 0.
String tableName2="alterpart2";
ret=driver.run("CREATE TABLE " + tableName2 + " (key string, value string) partitioned by (part1 int, part2 int, part3 int)");
Assert.assertEquals(0,ret.getResponseCode());
assertExistence(warehouseDir + "/" + tableName2);
setPermission(warehouseDir + "/" + tableName2);
ret=driver.run("alter table " + tableName2 + " exchange partition (part1='2',part2='2',part3='2') with table "+ tableName);
Assert.assertEquals(0,ret.getResponseCode());
// New intermediate dirs take the destination's set 0; the moved leaf keeps set 1.
verifyPermission(warehouseDir + "/" + tableName2+ "/part1=2",0);
verifyPermission(warehouseDir + "/" + tableName2+ "/part1=2/part2=2",0);
verifyPermission(warehouseDir + "/" + tableName2+ "/part1=2/part2=2/part3=2",1);
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Inserting into a table with an explicit LOCATION: the data files must
 * inherit the permission set on that location.
 * NOTE(review): despite the test name, the DDL carries no EXTERNAL keyword,
 * so this creates a managed table with a custom location — confirm whether
 * "CREATE EXTERNAL TABLE" was intended.
 */
@Test public void testExternalTable() throws Exception {
String tableName="externaltable";
String myLocation=warehouseDir + "/myfolder";
FileSystem fs=FileSystem.get(new URI(myLocation),conf);
fs.mkdirs(new Path(myLocation));
setPermission(myLocation);
CommandProcessorResponse ret=driver.run("CREATE TABLE " + tableName + " (key string, value string) LOCATION '"+ myLocation+ "'");
Assert.assertEquals(0,ret.getResponseCode());
ret=driver.run("insert into table " + tableName + " select key,value from mysrc");
Assert.assertEquals(0,ret.getResponseCode());
// All files written into the custom location inherit its permission.
Assert.assertTrue(listStatus(myLocation).size() > 0);
for ( String child : listStatus(myLocation)) {
verifyPermission(child);
}
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * EXPORT/IMPORT (exim) permission inheritance:
 * - EXPORT TABLE writes its dump under a directory whose parent carries the
 *   default permission set; every exported partition dir and file must
 *   inherit it.
 * - IMPORT into a database whose .db directory carries permission set 1 must
 *   re-create the table, partitions and files with set 1.
 */
@Test public void testExim() throws Exception {
String myLocation=warehouseDir + "/exim";
FileSystem fs=FileSystem.get(new URI(myLocation),conf);
fs.mkdirs(new Path(myLocation));
setPermission(myLocation);
// Export into a subdirectory of the permission-tagged folder.
myLocation=myLocation + "/temp";
CommandProcessorResponse ret=driver.run("export table mysrc to '" + myLocation + "'");
Assert.assertEquals(0,ret.getResponseCode());
assertExistence(myLocation);
verifyPermission(myLocation);
// Both exported partitions and their data files inherit the permission.
assertExistence(myLocation + "/part1=1/part2=1");
verifyPermission(myLocation + "/part1=1/part2=1");
Assert.assertTrue(listStatus(myLocation + "/part1=1/part2=1").size() > 0);
for ( String child : listStatus(myLocation + "/part1=1/part2=1")) {
verifyPermission(child);
}
assertExistence(myLocation + "/part1=2/part2=2");
verifyPermission(myLocation + "/part1=2/part2=2");
Assert.assertTrue(listStatus(myLocation + "/part1=2/part2=2").size() > 0);
for ( String child : listStatus(myLocation + "/part1=2/part2=2")) {
verifyPermission(child);
}
// Import into a fresh database tagged with permission set 1.
String testDb="eximdb";
ret=driver.run("CREATE DATABASE " + testDb);
Assert.assertEquals(0,ret.getResponseCode());
assertExistence(warehouseDir + "/" + testDb+ ".db");
setPermission(warehouseDir + "/" + testDb+ ".db",1);
ret=driver.run("USE " + testDb);
Assert.assertEquals(0,ret.getResponseCode());
ret=driver.run("import from '" + myLocation + "'");
Assert.assertEquals(0,ret.getResponseCode());
assertExistence(warehouseDir + "/" + testDb+ ".db/mysrc");
verifyPermission(warehouseDir + "/" + testDb+ ".db/mysrc",1);
// The imported table tree (dirs and files) must carry permission set 1.
myLocation=warehouseDir + "/" + testDb+ ".db/mysrc";
assertExistence(myLocation);
verifyPermission(myLocation,1);
assertExistence(myLocation + "/part1=1/part2=1");
verifyPermission(myLocation + "/part1=1/part2=1",1);
Assert.assertTrue(listStatus(myLocation + "/part1=1/part2=1").size() > 0);
for ( String child : listStatus(myLocation + "/part1=1/part2=1")) {
verifyPermission(child,1);
}
assertExistence(myLocation + "/part1=2/part2=2");
verifyPermission(myLocation + "/part1=2/part2=2",1);
Assert.assertTrue(listStatus(myLocation + "/part1=2/part2=2").size() > 0);
for ( String child : listStatus(myLocation + "/part1=2/part2=2")) {
verifyPermission(child,1);
}
}
InternalCallVerifier EqualityVerifier
/**
 * Dynamic single-level partition insert: partitions created by INSERT must
 * inherit the table directory's permission (set 0, then set 1 after an
 * OVERWRITE). The table is then dropped and re-created to verify that an
 * OVERWRITE into a brand-new table uses the fresh default permission again.
 * Per-partition checks are delegated to verifySinglePartition/
 * setSinglePartition (defined outside this chunk).
 */
@Test public void testInsertSingleDynamicPartition() throws Exception {
String tableName="singledynamicpart";
CommandProcessorResponse ret=driver.run("CREATE TABLE " + tableName + " (key string, value string) partitioned by (part1 string)");
Assert.assertEquals(0,ret.getResponseCode());
String tableLoc=warehouseDir + "/" + tableName;
assertExistence(tableLoc);
setPermission(tableLoc,0);
ret=driver.run("insert into table " + tableName + " partition (part1) select key,value,part1 from mysrc");
Assert.assertEquals(0,ret.getResponseCode());
verifySinglePartition(tableLoc,0);
// Re-tag with set 1 and overwrite; new partitions must follow.
setSinglePartition(tableLoc,1);
ret=driver.run("insert overwrite table " + tableName + " partition (part1) select key,value,part1 from mysrc");
Assert.assertEquals(0,ret.getResponseCode());
verifySinglePartition(tableLoc,1);
// Recreate the table from scratch: overwrite into it uses set 0 again.
ret=driver.run("DROP TABLE " + tableName);
Assert.assertEquals(0,ret.getResponseCode());
ret=driver.run("CREATE TABLE " + tableName + " (key string, value string) partitioned by (part1 string)");
Assert.assertEquals(0,ret.getResponseCode());
assertExistence(warehouseDir + "/" + tableName);
setPermission(warehouseDir + "/" + tableName);
ret=driver.run("insert overwrite table " + tableName + " partition (part1) select key,value,part1 from mysrc");
Assert.assertEquals(0,ret.getResponseCode());
verifySinglePartition(tableLoc,0);
}
Class: org.apache.hadoop.hive.ql.security.TestMultiAuthorizationPreEventListener EqualityVerifier
/**
 * With multiple authorization providers registered, each metastore operation
 * must trigger one authorization call per provider — hence the call count
 * grows by 2 per operation (presumably two listeners are configured in the
 * test setup outside this chunk — TODO confirm).
 * Method name typo ("Listners") is preserved to avoid breaking external
 * references to the test.
 */
@Test public void testMultipleAuthorizationListners() throws Exception {
String dbName="hive" + this.getClass().getSimpleName().toLowerCase();
List authCalls=DummyHiveMetastoreAuthorizationProvider.authCalls;
int listSize=0;
// No authorization calls have been made before the first operation.
assertEquals(listSize,authCalls.size());
driver.run("create database " + dbName);
// create database -> one auth call per registered listener.
listSize=2;
assertEquals(listSize,authCalls.size());
Database db=msc.getDatabase(dbName);
// getDatabase -> two more auth calls.
listSize+=2;
Database dbFromEvent=(Database)assertAndExtractSingleObjectFromEvent(listSize,authCalls,DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.DB);
// The database object seen by the listener must match the one from the metastore.
validateCreateDb(db,dbFromEvent);
}
Class: org.apache.hadoop.hive.ql.security.TestPasswordWithConfig InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * HadoopShims.getPassword() with no credential provider configured falls
 * back to plain Configuration values: a key that is set is returned as-is,
 * an unset key yields null.
 */
@Test public void testPassword() throws Exception {
  Configuration conf = new Configuration();
  String presentKey = "key1";
  String missingKey = "key2";
  String expectedValue = "value1";
  conf.set(presentKey, expectedValue);
  assertEquals("key1 should exist in config", expectedValue, ShimLoader.getHadoopShims().getPassword(conf, presentKey));
  assertNull("key2 should not exist in config", ShimLoader.getHadoopShims().getPassword(conf, missingKey));
}
Class: org.apache.hadoop.hive.ql.security.TestPasswordWithCredentialProvider InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * HadoopShims.getPassword() resolution order when a JCEKS credential
 * provider is configured:
 * - key in both conf and provider -> provider value wins
 * - key only in conf -> conf value (clear-text fallback is enabled)
 * - key only in provider -> provider value
 * - key in neither -> null
 * All credential-provider calls go through reflection so the test still
 * compiles against Hadoop versions that lack the password API (hence the
 * runtime capability check at the top).
 */
@Test public void testPassword() throws Exception {
if (!doesHadoopPasswordAPIExist()) {
System.out.println("Skipping Password API test" + " because this version of hadoop-2 does not support the password API.");
return;
}
String credName="my.password";
String credName2="my.password2";
String credName3="my.password3";
String hiveConfPassword="conf value";
String credPassword="cred value";
String confOnlyPassword="abcdefg";
String credOnlyPassword="12345";
Configuration conf=new Configuration();
// credName set in conf AND (later) in the provider; credName2 conf-only.
conf.set(credName,hiveConfPassword);
conf.set(credName2,confOnlyPassword);
assertEquals(hiveConfPassword,conf.get(credName));
assertEquals(confOnlyPassword,conf.get(credName2));
assertNull("credName3 should not exist in HiveConf",conf.get(credName3));
// Allow conf values to serve as a fallback, and point at a JCEKS keystore.
conf.set("hadoop.security.credential.clear-text-fallback","true");
conf.set("hadoop.security.credential.provider.path","jceks://file/" + tmpDir.toURI().getPath() + "/test.jks");
// Reflection keeps the compile-time dependency on the provider API optional.
Class credentialProviderClass=Class.forName("org.apache.hadoop.security.alias.CredentialProvider");
Class credentialProviderFactoryClass=Class.forName("org.apache.hadoop.security.alias.CredentialProviderFactory");
Object provider=((List)invoke(credentialProviderFactoryClass,null,"getProviders",conf)).get(0);
// credName in both places; credName3 provider-only.
invoke(credentialProviderClass,provider,"createCredentialEntry",credName,credPassword.toCharArray());
invoke(credentialProviderClass,provider,"createCredentialEntry",credName3,credOnlyPassword.toCharArray());
invoke(credentialProviderClass,provider,"flush");
assertEquals("getPassword() should use match value in credential provider",credPassword,ShimLoader.getHadoopShims().getPassword(conf,credName));
assertEquals("getPassword() should match value from conf",confOnlyPassword,ShimLoader.getHadoopShims().getPassword(conf,credName2));
assertEquals("getPassword() should use credential provider if conf has no value",credOnlyPassword,ShimLoader.getHadoopShims().getPassword(conf,credName3));
assertNull("null if neither cred provider or conf have entry",ShimLoader.getHadoopShims().getPassword(conf,"nonexistentkey"));
}
Class: org.apache.hadoop.hive.ql.security.TestStorageBasedMetastoreAuthorizationDrops InternalCallVerifier EqualityVerifier
/**
 * Dropping a view must not be blocked by storage-based authorization (SBA):
 * a view has no storage location whose permissions could be checked, so the
 * drop should succeed even though the database directory is not writable by
 * everyone in the usual sense (sticky-bit style "-rwxrwxrwt").
 * @throws Exception on any driver or metastore failure
 */
@Test public void testDropView() throws Exception {
String dbName=getTestDbName();
String tblName=getTestTableName();
String viewName="view" + tblName;
// Warehouse must be world-accessible so the database can be created.
setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE),"-rwxrwxrwx");
CommandProcessorResponse resp=driver.run("create database " + dbName);
Assert.assertEquals(0,resp.getResponseCode());
Database db=msc.getDatabase(dbName);
validateCreateDb(db,dbName);
// Sticky-bit-like permission on the db dir; SBA would block location-based drops.
setPermissions(db.getLocationUri(),"-rwxrwxrwt");
String dbDotTable=dbName + "." + tblName;
resp=driver.run("create table " + dbDotTable + "(i int)");
Assert.assertEquals(0,resp.getResponseCode());
String dbDotView=dbName + "." + viewName;
resp=driver.run("create view " + dbDotView + " as select * from "+ dbDotTable);
Assert.assertEquals(0,resp.getResponseCode());
// The view drop must succeed: no location, nothing for SBA to check.
resp=driver.run("drop view " + dbDotView);
Assert.assertEquals(0,resp.getResponseCode());
// Cleanup.
resp=driver.run("drop table " + dbDotTable);
Assert.assertEquals(0,resp.getResponseCode());
}
Class: org.apache.hadoop.hive.ql.security.authorization.plugin.TestHiveAuthorizerCheckInvocation APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * "describe &lt;table&gt;" must surface the table as an input object with no
 * column list attached (column-level authorization does not apply to
 * DESCRIBE).
 */
@Test public void testInputNoColumnsUsed() throws HiveAuthzPluginException, HiveAccessControlException, CommandNeedRetryException {
  reset(mockedAuthorizer);
  int status = driver.compile("describe " + tableName);
  assertEquals(0, status);
  // Restored the type argument lost during extraction; the raw List would
  // not compile against the HivePrivilegeObject access below.
  List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
  checkSingleTableInput(inputs);
  HivePrivilegeObject tableObj = inputs.get(0);
  assertNull("columns used", tableObj.getColumns());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * CREATE TEMPORARY FUNCTION must surface a FUNCTION-type output object whose
 * name matches the function and whose db name is null (temporary functions
 * are not database-scoped).
 */
@Test public void testTempFunction() throws HiveAuthzPluginException, HiveAccessControlException, CommandNeedRetryException {
  reset(mockedAuthorizer);
  final String funcName = "testAuthFunc2";
  int status = driver.compile("create temporary function " + funcName + " as 'org.apache.hadoop.hive.ql.udf.UDFPI'");
  assertEquals(0, status);
  // Restored the type argument lost during extraction; the raw List would
  // not compile against the HivePrivilegeObject access below.
  List<HivePrivilegeObject> outputs = getHivePrivilegeObjectInputs().getRight();
  HivePrivilegeObject funcObj = outputs.get(0);
  assertEquals("input type", HivePrivilegeObjectType.FUNCTION, funcObj.getType());
  // Hive identifiers are case-insensitive, hence equalsIgnoreCase.
  assertTrue("function name", funcName.equalsIgnoreCase(funcObj.getObjectName()));
  assertEquals("db name", null, funcObj.getDbname());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void testShowTables() throws HiveAuthzPluginException, HiveAccessControlException, CommandNeedRetryException {
reset(mockedAuthorizer);
int status=driver.compile("show tables");
assertEquals(0,status);
Pair,List> io=getHivePrivilegeObjectInputs();
List inputs=io.getLeft();
assertEquals(1,inputs.size());
HivePrivilegeObject dbObj=inputs.get(0);
assertEquals("default",dbObj.getDbname().toLowerCase());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * CREATE FUNCTION (permanent, db-qualified) must surface two output objects:
 * the FUNCTION itself (name + db) and its containing DATABASE. Their order
 * in the list is unspecified, hence the type-based disambiguation.
 */
@Test public void testPermFunction() throws HiveAuthzPluginException, HiveAccessControlException, CommandNeedRetryException {
  reset(mockedAuthorizer);
  final String funcName = "testauthfunc1";
  int status = driver.compile("create function " + dbName + "." + funcName + " as 'org.apache.hadoop.hive.ql.udf.UDFPI'");
  assertEquals(0, status);
  // Restored the type argument lost during extraction; the raw List would
  // not compile against the HivePrivilegeObject accesses below.
  List<HivePrivilegeObject> outputs = getHivePrivilegeObjectInputs().getRight();
  HivePrivilegeObject funcObj;
  HivePrivilegeObject dbObj;
  assertEquals("number of output object", 2, outputs.size());
  // The function/database objects may arrive in either order.
  if (outputs.get(0).getType() == HivePrivilegeObjectType.FUNCTION) {
    funcObj = outputs.get(0);
    dbObj = outputs.get(1);
  } else {
    funcObj = outputs.get(1);
    dbObj = outputs.get(0);
  }
  assertEquals("input type", HivePrivilegeObjectType.FUNCTION, funcObj.getType());
  assertTrue("function name", funcName.equalsIgnoreCase(funcObj.getObjectName()));
  assertTrue("db name", dbName.equalsIgnoreCase(funcObj.getDbname()));
  assertEquals("input type", HivePrivilegeObjectType.DATABASE, dbObj.getType());
  assertTrue("db name", dbName.equalsIgnoreCase(dbObj.getDbname()));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void testDelete() throws HiveAuthzPluginException, HiveAccessControlException, CommandNeedRetryException {
reset(mockedAuthorizer);
int status=driver.compile("delete from " + acidTableName + " where j = 3");
assertEquals(0,status);
Pair,List> io=getHivePrivilegeObjectInputs();
List inputs=io.getLeft();
assertEquals(1,inputs.size());
HivePrivilegeObject tableObj=inputs.get(0);
assertEquals(1,tableObj.getColumns().size());
assertEquals("j",tableObj.getColumns().get(0));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * A SELECT referencing a subset of columns (projection "i", filters "k" and
 * "city") must report exactly those three columns on the single input table.
 */
@Test public void testInputSomeColumnsUsed() throws HiveAuthzPluginException, HiveAccessControlException, CommandNeedRetryException {
  reset(mockedAuthorizer);
  int status = driver.compile("select i from " + tableName + " where k = 'X' and city = 'Scottsdale-AZ' ");
  assertEquals(0, status);
  // Restored the type argument lost during extraction; the raw List would
  // not compile against the HivePrivilegeObject access below.
  List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
  checkSingleTableInput(inputs);
  HivePrivilegeObject tableObj = inputs.get(0);
  assertEquals("no of columns used", 3, tableObj.getColumns().size());
  // Column order is unspecified; compare sorted.
  assertEquals("Columns used", Arrays.asList("city", "i", "k"), getSortedList(tableObj.getColumns()));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void testDescDatabase() throws HiveAuthzPluginException, HiveAccessControlException, CommandNeedRetryException {
reset(mockedAuthorizer);
int status=driver.compile("describe database " + dbName);
assertEquals(0,status);
Pair,List> io=getHivePrivilegeObjectInputs();
List inputs=io.getLeft();
assertEquals(1,inputs.size());
HivePrivilegeObject dbObj=inputs.get(0);
assertEquals(dbName.toLowerCase(),dbObj.getDbname().toLowerCase());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * "select *" must report every column of the single input table (all five:
 * city, date, i, j, k).
 */
@Test public void testInputAllColumnsUsed() throws HiveAuthzPluginException, HiveAccessControlException, CommandNeedRetryException {
  reset(mockedAuthorizer);
  int status = driver.compile("select * from " + tableName + " order by i");
  assertEquals(0, status);
  // Restored the type argument lost during extraction; the raw List would
  // not compile against the HivePrivilegeObject access below.
  List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
  checkSingleTableInput(inputs);
  HivePrivilegeObject tableObj = inputs.get(0);
  assertEquals("no of columns used", 5, tableObj.getColumns().size());
  // Column order is unspecified; compare sorted.
  assertEquals("Columns used", Arrays.asList("city", "date", "i", "j", "k"), getSortedList(tableObj.getColumns()));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void testUpdateSomeColumnsUsed() throws HiveAuthzPluginException, HiveAccessControlException, CommandNeedRetryException {
reset(mockedAuthorizer);
int status=driver.compile("update " + acidTableName + " set i = 5 where j = 3");
assertEquals(0,status);
Pair,List> io=getHivePrivilegeObjectInputs();
List outputs=io.getRight();
HivePrivilegeObject tableObj=outputs.get(0);
LOG.debug("Got privilege object " + tableObj);
assertEquals("no of columns used",1,tableObj.getColumns().size());
assertEquals("Column used","i",tableObj.getColumns().get(0));
List inputs=io.getLeft();
assertEquals(1,inputs.size());
tableObj=inputs.get(0);
assertEquals(2,tableObj.getColumns().size());
assertEquals("j",tableObj.getColumns().get(0));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
@Test public void testUpdateSomeColumnsUsedExprInSet() throws HiveAuthzPluginException, HiveAccessControlException, CommandNeedRetryException {
reset(mockedAuthorizer);
int status=driver.compile("update " + acidTableName + " set i = 5, l = k where j = 3");
assertEquals(0,status);
Pair,List> io=getHivePrivilegeObjectInputs();
List outputs=io.getRight();
HivePrivilegeObject tableObj=outputs.get(0);
LOG.debug("Got privilege object " + tableObj);
assertEquals("no of columns used",2,tableObj.getColumns().size());
assertEquals("Columns used",Arrays.asList("i","l"),getSortedList(tableObj.getColumns()));
List inputs=io.getLeft();
assertEquals(1,inputs.size());
tableObj=inputs.get(0);
assertEquals(2,tableObj.getColumns().size());
assertEquals("Columns used",Arrays.asList("j","k"),getSortedList(tableObj.getColumns()));
}
Class: org.apache.hadoop.hive.ql.session.TestAddResource EqualityVerifier
/**
 * add_resources() for a single query whose (mocked) resolution yields five
 * jars: list_resource() must return exactly those URIs.
 */
@Test public void testSanity() throws URISyntaxException, IOException {
  SessionState ss = Mockito.spy(SessionState.start(conf).get());
  String query = "testQuery";
  // Restored type arguments lost during extraction; the raw collections
  // would not compile against the typed enhanced-for below.
  List<URI> list = new LinkedList<>();
  List<String> addList = new LinkedList<>();
  list.add(createURI(TEST_JAR_DIR + "testjar1.jar"));
  list.add(createURI(TEST_JAR_DIR + "testjar2.jar"));
  list.add(createURI(TEST_JAR_DIR + "testjar3.jar"));
  list.add(createURI(TEST_JAR_DIR + "testjar4.jar"));
  list.add(createURI(TEST_JAR_DIR + "testjar5.jar"));
  // Stub jar resolution so no real download happens.
  Mockito.when(ss.resolveAndDownload(query, false)).thenReturn(list);
  addList.add(query);
  ss.add_resources(t, addList);
  Set<String> dependencies = ss.list_resource(t, null);
  LinkedList<URI> actual = new LinkedList<>();
  for (String dependency : dependencies) {
    actual.add(createURI(dependency));
  }
  // The returned set's iteration order is unspecified; compare sorted.
  Collections.sort(list);
  Collections.sort(actual);
  assertEquals(list, actual);
  ss.close();
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * delete_resources() with three overlapping jar sets: deleting one jar from
 * a query's set must drop that query's whole set while keeping jars still
 * referenced by other queries; after all three sets are invalidated the
 * resource list must be empty.
 */
@Test public void testDeleteJarMultiple() throws URISyntaxException, IOException {
  SessionState ss = Mockito.spy(SessionState.start(conf).get());
  String query1 = "testQuery1";
  String query2 = "testQuery2";
  String query3 = "testQuery3";
  // Restored type arguments lost during extraction; the raw collections
  // would not compile against the typed enhanced-for loops below.
  List<URI> list1 = new LinkedList<>();
  List<URI> list2 = new LinkedList<>();
  List<URI> list3 = new LinkedList<>();
  List<String> addList = new LinkedList<>();
  list1.add(createURI(TEST_JAR_DIR + "testjar1.jar"));
  list1.add(createURI(TEST_JAR_DIR + "testjar2.jar"));
  list1.add(createURI(TEST_JAR_DIR + "testjar3.jar"));
  list1.add(createURI(TEST_JAR_DIR + "testjar4.jar"));
  list2.add(createURI(TEST_JAR_DIR + "testjar5.jar"));
  list2.add(createURI(TEST_JAR_DIR + "testjar3.jar"));
  list2.add(createURI(TEST_JAR_DIR + "testjar4.jar"));
  list3.add(createURI(TEST_JAR_DIR + "testjar4.jar"));
  list3.add(createURI(TEST_JAR_DIR + "testjar2.jar"));
  list3.add(createURI(TEST_JAR_DIR + "testjar5.jar"));
  // Sort up-front so get(0) below is deterministic.
  Collections.sort(list1);
  Collections.sort(list2);
  Collections.sort(list3);
  Mockito.when(ss.resolveAndDownload(query1, false)).thenReturn(list1);
  Mockito.when(ss.resolveAndDownload(query2, false)).thenReturn(list2);
  Mockito.when(ss.resolveAndDownload(query3, false)).thenReturn(list3);
  addList.add(query1);
  addList.add(query2);
  addList.add(query3);
  ss.add_resources(t, addList);
  // Deleting one jar from query1's set removes that set; union(list2, list3) remains.
  List<String> deleteList = new LinkedList<>();
  deleteList.add(list1.get(0).toString());
  ss.delete_resources(t, deleteList);
  Set<String> dependencies = ss.list_resource(t, null);
  LinkedList<URI> actual = new LinkedList<>();
  for (String dependency : dependencies) {
    actual.add(createURI(dependency));
  }
  List<URI> expected = union(list2, list3);
  Collections.sort(expected);
  Collections.sort(actual);
  assertEquals(expected, actual);
  actual.clear();
  expected.clear();
  deleteList.clear();
  // Deleting a jar from query2's set leaves only query3's set.
  deleteList.add(list2.get(0).toString());
  ss.delete_resources(t, deleteList);
  dependencies = ss.list_resource(t, null);
  actual = new LinkedList<>();
  for (String dependency : dependencies) {
    actual.add(createURI(dependency));
  }
  expected = new LinkedList<>(list3);
  Collections.sort(expected);
  Collections.sort(actual);
  assertEquals(expected, actual);
  actual.clear();
  expected.clear();
  deleteList.clear();
  // Deleting from the last set empties the resource list entirely.
  deleteList.add(list3.get(0).toString());
  ss.delete_resources(t, deleteList);
  dependencies = ss.list_resource(t, null);
  // Expected value first (the original had the arguments reversed).
  assertEquals(true, dependencies.isEmpty());
  ss.close();
}
APIUtilityVerifier EqualityVerifier
/**
 * Adding two queries whose jar sets overlap: list_resource() must return the
 * union of both sets (each shared jar appearing once).
 */
@Test public void testUnion() throws URISyntaxException, IOException {
  // Local conf/t intentionally shadow the class fields (as in the original).
  HiveConf conf = new HiveConf();
  SessionState ss = Mockito.spy(SessionState.start(conf).get());
  ResourceType t = ResourceType.JAR;
  String query1 = "testQuery1";
  String query2 = "testQuery2";
  // Restored type arguments lost during extraction; the raw collections
  // would not compile against the typed enhanced-for below.
  List<String> addList = new LinkedList<>();
  List<URI> list1 = new LinkedList<>();
  List<URI> list2 = new LinkedList<>();
  list1.add(createURI(TEST_JAR_DIR + "testjar1.jar"));
  list1.add(createURI(TEST_JAR_DIR + "testjar2.jar"));
  list1.add(createURI(TEST_JAR_DIR + "testjar3.jar"));
  list1.add(createURI(TEST_JAR_DIR + "testjar4.jar"));
  list2.add(createURI(TEST_JAR_DIR + "testjar5.jar"));
  list2.add(createURI(TEST_JAR_DIR + "testjar3.jar"));
  list2.add(createURI(TEST_JAR_DIR + "testjar4.jar"));
  Mockito.when(ss.resolveAndDownload(query1, false)).thenReturn(list1);
  Mockito.when(ss.resolveAndDownload(query2, false)).thenReturn(list2);
  addList.add(query1);
  addList.add(query2);
  ss.add_resources(t, addList);
  Set<String> dependencies = ss.list_resource(t, null);
  LinkedList<URI> actual = new LinkedList<>();
  for (String dependency : dependencies) {
    actual.add(createURI(dependency));
  }
  List<URI> expected = union(list1, list2);
  // The returned set's iteration order is unspecified; compare sorted.
  Collections.sort(expected);
  Collections.sort(actual);
  assertEquals(expected, actual);
  ss.close();
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * delete_resources() with two overlapping jar sets: deleting a jar from the
 * first query's set must leave exactly the second set; deleting from the
 * second set must empty the resource list.
 */
@Test public void testDeleteJar() throws URISyntaxException, IOException {
  SessionState ss = Mockito.spy(SessionState.start(conf).get());
  String query1 = "testQuery1";
  String query2 = "testQuery2";
  // Restored type arguments lost during extraction; the raw collections
  // would not compile against the typed enhanced-for below.
  List<URI> list1 = new LinkedList<>();
  List<URI> list2 = new LinkedList<>();
  List<String> addList = new LinkedList<>();
  list1.add(createURI(TEST_JAR_DIR + "testjar1.jar"));
  list1.add(createURI(TEST_JAR_DIR + "testjar2.jar"));
  list1.add(createURI(TEST_JAR_DIR + "testjar3.jar"));
  list1.add(createURI(TEST_JAR_DIR + "testjar4.jar"));
  list2.add(createURI(TEST_JAR_DIR + "testjar5.jar"));
  list2.add(createURI(TEST_JAR_DIR + "testjar3.jar"));
  list2.add(createURI(TEST_JAR_DIR + "testjar4.jar"));
  // Sort up-front so get(0) below is deterministic.
  Collections.sort(list1);
  Collections.sort(list2);
  Mockito.when(ss.resolveAndDownload(query1, false)).thenReturn(list1);
  Mockito.when(ss.resolveAndDownload(query2, false)).thenReturn(list2);
  addList.add(query1);
  addList.add(query2);
  ss.add_resources(t, addList);
  // Deleting one jar from query1's set removes that whole set.
  List<String> deleteList = new LinkedList<>();
  deleteList.add(list1.get(0).toString());
  ss.delete_resources(t, deleteList);
  Set<String> dependencies = ss.list_resource(t, null);
  LinkedList<URI> actual = new LinkedList<>();
  for (String dependency : dependencies) {
    actual.add(createURI(dependency));
  }
  // NOTE: expected aliases list2 (already sorted above), so this sort is a
  // no-op on a fresh ordering; kept for symmetry with actual.
  List<URI> expected = list2;
  Collections.sort(expected);
  Collections.sort(actual);
  assertEquals(expected, actual);
  deleteList.clear();
  // Deleting a jar from query2's set empties the resource list.
  deleteList.add(list2.get(0).toString());
  ss.delete_resources(t, deleteList);
  dependencies = ss.list_resource(t, null);
  // Expected value first (the original had the arguments reversed).
  assertEquals(true, dependencies.isEmpty());
  ss.close();
}
EqualityVerifier
/**
 * Adding the same query ten times must not duplicate its jars:
 * list_resource() still returns each jar exactly once.
 */
@Test public void testDuplicateAdds() throws URISyntaxException, IOException {
  SessionState ss = Mockito.spy(SessionState.start(conf).get());
  String query = "testQuery";
  // Restored type arguments lost during extraction; the raw collections
  // would not compile against the typed enhanced-for below.
  List<URI> list = new LinkedList<>();
  List<String> addList = new LinkedList<>();
  list.add(createURI(TEST_JAR_DIR + "testjar1.jar"));
  list.add(createURI(TEST_JAR_DIR + "testjar2.jar"));
  list.add(createURI(TEST_JAR_DIR + "testjar3.jar"));
  list.add(createURI(TEST_JAR_DIR + "testjar4.jar"));
  list.add(createURI(TEST_JAR_DIR + "testjar5.jar"));
  Collections.sort(list);
  Mockito.when(ss.resolveAndDownload(query, false)).thenReturn(list);
  // Queue the identical query ten times; the resource set must deduplicate.
  for (int i = 0; i < 10; i++) {
    addList.add(query);
  }
  ss.add_resources(t, addList);
  Set<String> dependencies = ss.list_resource(t, null);
  LinkedList<URI> actual = new LinkedList<>();
  for (String dependency : dependencies) {
    actual.add(createURI(dependency));
  }
  Collections.sort(actual);
  assertEquals(list, actual);
  ss.close();
}
Class: org.apache.hadoop.hive.ql.session.TestSessionState UtilityVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Refreshing reloadable aux jars on an existing session: copy version 1 of
 * the jar into the reload folder and expect "version1" after reload; swap in
 * version 2 and expect "version2"; finally reload once more after the jar is
 * removed to make sure an empty reload folder is tolerated.
 */
@Test public void testReloadExistingAuxJars2(){
  HiveConf conf=new HiveConf();
  HiveConf.setVar(conf,ConfVars.HIVERELOADABLEJARS,hiveReloadPath);
  SessionState ss=new SessionState(conf);
  SessionState.start(ss);
  File dist=null;
  try {
    ss=SessionState.get();
    LOG.info("copy jar file 1");
    dist=new File(reloadFolder.getAbsolutePath() + File.separator + reloadClazzFileName);
    Files.copy(new File(HiveTestUtils.getFileFromClasspath(clazzDistFileName)),dist);
    ss.reloadAuxJars();
    Assert.assertEquals("version1",getReloadedClazzVersion(ss.getConf().getClassLoader()));
    LOG.info("copy jar file 2");
    FileUtils.deleteQuietly(dist);
    Files.copy(new File(HiveTestUtils.getFileFromClasspath(clazzV2FileName)),dist);
    ss.reloadAuxJars();
    Assert.assertEquals("version2",getReloadedClazzVersion(ss.getConf().getClassLoader()));
    // Reload with the jar deleted must not throw.
    FileUtils.deleteQuietly(dist);
    ss.reloadAuxJars();
  }
  catch ( Exception e) {
    LOG.error("refresh existing jar file case failed with message: ",e);
    Assert.fail(e.getMessage());
  }
  finally {
    FileUtils.deleteQuietly(dist);
    try {
      ss.close();
    }
    catch ( IOException ioException) {
      // Log BEFORE failing: Assert.fail() throws AssertionError, so any
      // statement after it (the original LOG.error) was unreachable.
      LOG.error("Fail to close the created session: ",ioException);
      Assert.fail(ioException.getMessage());
    }
  }
}
EqualityVerifier
/**
 * Exercises the current-database accessors on SessionState: a fresh session
 * reports the default database, setCurrentDatabase changes what the getter
 * returns, and starting a brand-new session resets the selection.
 */
@Test public void testgetDbName() throws Exception {
  // A freshly started session points at the default database.
  SessionState session = SessionState.get();
  assertEquals(MetaStoreUtils.DEFAULT_DATABASE_NAME, session.getCurrentDatabase());

  // Switching databases is reflected by the getter.
  final String newdb = "DB_2";
  session.setCurrentDatabase(newdb);
  assertEquals(newdb, SessionState.get().getCurrentDatabase());

  // Starting a new session resets the selection back to the default.
  SessionState.start(new HiveConf());
  assertEquals(MetaStoreUtils.DEFAULT_DATABASE_NAME, SessionState.get().getCurrentDatabase());
}
InternalCallVerifier EqualityVerifier
/**
 * Registers a jar from a secondary thread, then starts the session on this
 * thread and asserts that the class loader installed by the other thread is
 * the very same one held by the session conf and by this thread's context.
 */
@Test public void testClassLoaderEquality() throws Exception {
  final SessionState ss1 = new SessionState(new HiveConf());
  RegisterJarRunnable otherThread = new RegisterJarRunnable("./build/contrib/test/test-udfs.jar", ss1);

  // Register the jar on a different thread and wait for it to finish.
  Thread registrar = new Thread(otherThread);
  registrar.start();
  registrar.join();

  SessionState.start(ss1);
  ClassLoader loader2 = SessionState.get().getConf().getClassLoader();
  System.out.println("Loader1:(Set in other thread) " + otherThread.loader);
  System.out.println("Loader2:(Set in SessionState.conf) " + loader2);
  System.out.println("Loader3:(CurrentThread.getContextClassLoader()) " + Thread.currentThread().getContextClassLoader());

  // All three views must resolve to the same loader instance.
  assertEquals("Other thread loader and session state loader", otherThread.loader, loader2);
  assertEquals("Other thread loader and current thread loader", otherThread.loader, Thread.currentThread().getContextClassLoader());
}
UtilityVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Basic reloadable aux-jar test: copy the jar into the configured reload
 * folder, call reloadAuxJars(), and verify the reloaded class reports
 * "version1" when loaded through the session's class loader.
 */
@Test public void testReloadAuxJars2(){
  HiveConf conf=new HiveConf();
  HiveConf.setVar(conf,ConfVars.HIVERELOADABLEJARS,hiveReloadPath);
  SessionState ss=new SessionState(conf);
  SessionState.start(ss);
  ss=SessionState.get();
  File dist=null;
  try {
    dist=new File(reloadFolder.getAbsolutePath() + File.separator + reloadClazzFileName);
    Files.copy(new File(HiveTestUtils.getFileFromClasspath(clazzDistFileName)),dist);
    ss.reloadAuxJars();
    Assert.assertEquals("version1",getReloadedClazzVersion(ss.getConf().getClassLoader()));
  }
  catch ( Exception e) {
    LOG.error("Reload auxiliary jar test fail with message: ",e);
    Assert.fail(e.getMessage());
  }
  finally {
    FileUtils.deleteQuietly(dist);
    try {
      ss.close();
    }
    catch ( IOException ioException) {
      // Log BEFORE failing: Assert.fail() throws AssertionError, so the
      // original LOG.error placed after it could never execute.
      LOG.error("Fail to close the created session: ",ioException);
      Assert.fail(ioException.getMessage());
    }
  }
}
Class: org.apache.hadoop.hive.ql.txn.compactor.TestCleaner APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Cleaner behavior after a major compaction of a partition that had no
 * pre-existing base file: both delta directories are removed, only the new
 * base_25 remains, and the compaction ends in the SUCCEEDED state.
 */
@Test public void cleanupAfterMajorPartitionCompactionNoBase() throws Exception {
Table t=newTable("default","campcnb",true);
Partition p=newPartition(t,"today");
// Deltas covering txns 1-24, plus the base the major compaction would have
// produced at txn 25; note there is deliberately no original base file.
addDeltaFile(t,p,1L,22L,22);
addDeltaFile(t,p,23L,24L,2);
addBaseFile(t,p,25L,25);
burnThroughTransactions(25);
CompactionRequest rqst=new CompactionRequest("default","campcnb",CompactionType.MAJOR);
rqst.setPartitionname("ds=today");
txnHandler.compact(rqst);
// Simulate a worker claiming and finishing the compaction.
CompactionInfo ci=txnHandler.findNextToCompact("fred");
txnHandler.markCompacted(ci);
txnHandler.setRunAs(ci.id,System.getProperty("user.name"));
startCleaner();
// After cleaning: SUCCEEDED state and only base_25 left on disk.
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
Assert.assertEquals(1,rsp.getCompactsSize());
Assert.assertTrue(TxnStore.SUCCEEDED_RESPONSE.equals(rsp.getCompacts().get(0).getState()));
List paths=getDirectories(conf,t,p);
Assert.assertEquals(1,paths.size());
Assert.assertEquals("base_25",paths.get(0).getName());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * The Cleaner must not be blocked by a lock acquired AFTER the compaction was
 * marked done: an initial read lock delays cleaning, but once it is released
 * a later read lock on the same table must not stop the cleaner from
 * finishing (the compaction ends SUCCEEDED).
 */
@Test public void notBlockedBySubsequentLock() throws Exception {
  Table t=newTable("default","bblt",false);
  // Run the cleaner frequently so the polling loops below finish quickly.
  conf.setTimeVar(HiveConf.ConfVars.HIVE_COMPACTOR_CLEANER_RUN_INTERVAL,100,TimeUnit.MILLISECONDS);
  addBaseFile(t,null,20L,20);
  addDeltaFile(t,null,21L,22L,2);
  addDeltaFile(t,null,23L,24L,2);
  addDeltaFile(t,null,21L,24L,4);
  burnThroughTransactions(25);
  CompactionRequest rqst=new CompactionRequest("default","bblt",CompactionType.MINOR);
  txnHandler.compact(rqst);
  CompactionInfo ci=txnHandler.findNextToCompact("fred");
  txnHandler.markCompacted(ci);
  txnHandler.setRunAs(ci.id,System.getProperty("user.name"));
  // First read lock: taken before the cleaner runs, so it blocks cleaning.
  LockComponent comp=new LockComponent(LockType.SHARED_READ,LockLevel.TABLE,"default");
  comp.setTablename("bblt");
  List components=new ArrayList(1);
  components.add(comp);
  LockRequest req=new LockRequest(components,"me","localhost");
  LockResponse res=txnHandler.lock(req);
  AtomicBoolean looped=new AtomicBoolean();
  looped.set(false);
  startCleaner(looped);
  // Wait for the cleaner to complete at least one full pass.
  while (!looped.get()) {
    Thread.sleep(100);  // Thread.sleep is static; don't call it via an instance
  }
  ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
  List compacts=rsp.getCompacts();
  Assert.assertEquals(1,compacts.size());
  // Second read lock, acquired after the compaction was marked done;
  // this one must NOT block the cleaner.
  LockComponent comp2=new LockComponent(LockType.SHARED_READ,LockLevel.TABLE,"default");
  comp2.setTablename("bblt");
  List components2=new ArrayList(1);
  components2.add(comp2);
  // BUG FIX: the second request must carry components2 (with comp2); the
  // original passed the first request's component list, leaving comp2 unused.
  LockRequest req2=new LockRequest(components2,"me","localhost");
  txnHandler.lock(req2);
  // Release the first (blocking) lock and let the cleaner pass again.
  txnHandler.unlock(new UnlockRequest(res.getLockid()));
  looped.set(false);
  while (!looped.get()) {
    Thread.sleep(100);
  }
  stopThread();
  Thread.sleep(200);
  rsp=txnHandler.showCompact(new ShowCompactRequest());
  compacts=rsp.getCompacts();
  Assert.assertEquals(1,compacts.size());
  Assert.assertTrue(TxnStore.SUCCEEDED_RESPONSE.equals(rsp.getCompacts().get(0).getState()));
}
InternalCallVerifier EqualityVerifier
/**
 * A shared-read lock on the table held while the cleaner runs must block
 * cleaning: the compaction stays in the "ready for cleaning" state instead
 * of succeeding.
 */
@Test public void blockedByLockTable() throws Exception {
Table t=newTable("default","bblt",false);
addBaseFile(t,null,20L,20);
addDeltaFile(t,null,21L,22L,2);
addDeltaFile(t,null,23L,24L,2);
addDeltaFile(t,null,21L,24L,4);
burnThroughTransactions(25);
CompactionRequest rqst=new CompactionRequest("default","bblt",CompactionType.MINOR);
txnHandler.compact(rqst);
// Simulate a worker claiming and finishing the compaction.
CompactionInfo ci=txnHandler.findNextToCompact("fred");
txnHandler.markCompacted(ci);
txnHandler.setRunAs(ci.id,System.getProperty("user.name"));
// Take a table-level read lock BEFORE the cleaner runs; it is never released.
LockComponent comp=new LockComponent(LockType.SHARED_READ,LockLevel.TABLE,"default");
comp.setTablename("bblt");
List components=new ArrayList(1);
components.add(comp);
LockRequest req=new LockRequest(components,"me","localhost");
LockResponse res=txnHandler.lock(req);
startCleaner();
// The lock must have prevented cleanup: still "ready for cleaning".
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List compacts=rsp.getCompacts();
Assert.assertEquals(1,compacts.size());
Assert.assertEquals("ready for cleaning",compacts.get(0).getState());
Assert.assertEquals("bblt",compacts.get(0).getTablename());
Assert.assertEquals(CompactionType.MINOR,compacts.get(0).getType());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Partition-level variant of notBlockedBySubsequentLock: a read lock taken on
 * the partition before the cleaner runs blocks cleaning, but a second read
 * lock acquired after the compaction was marked done must not — once the
 * first lock is released the compaction ends SUCCEEDED.
 */
@Test public void partitionNotBlockedBySubsequentLock() throws Exception {
  Table t=newTable("default","bblt",true);
  Partition p=newPartition(t,"today");
  // Run the cleaner frequently so the polling loops below finish quickly.
  conf.setTimeVar(HiveConf.ConfVars.HIVE_COMPACTOR_CLEANER_RUN_INTERVAL,100,TimeUnit.MILLISECONDS);
  addBaseFile(t,p,20L,20);
  addDeltaFile(t,p,21L,22L,2);
  addDeltaFile(t,p,23L,24L,2);
  addDeltaFile(t,p,21L,24L,4);
  burnThroughTransactions(25);
  CompactionRequest rqst=new CompactionRequest("default","bblt",CompactionType.MINOR);
  rqst.setPartitionname("ds=today");
  txnHandler.compact(rqst);
  CompactionInfo ci=txnHandler.findNextToCompact("fred");
  txnHandler.markCompacted(ci);
  txnHandler.setRunAs(ci.id,System.getProperty("user.name"));
  // First read lock on the partition: blocks the cleaner while held.
  LockComponent comp=new LockComponent(LockType.SHARED_READ,LockLevel.PARTITION,"default");
  comp.setTablename("bblt");
  comp.setPartitionname("ds=today");
  List components=new ArrayList(1);
  components.add(comp);
  LockRequest req=new LockRequest(components,"me","localhost");
  LockResponse res=txnHandler.lock(req);
  AtomicBoolean looped=new AtomicBoolean();
  looped.set(false);
  startCleaner(looped);
  // Wait for the cleaner to complete at least one full pass.
  while (!looped.get()) {
    Thread.sleep(100);  // Thread.sleep is static; don't call it via an instance
  }
  ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
  List compacts=rsp.getCompacts();
  Assert.assertEquals(1,compacts.size());
  // Second read lock, acquired after the compaction was marked done;
  // this one must NOT block the cleaner.
  LockComponent comp2=new LockComponent(LockType.SHARED_READ,LockLevel.PARTITION,"default");
  comp2.setTablename("bblt");
  comp2.setPartitionname("ds=today");
  List components2=new ArrayList(1);
  components2.add(comp2);
  // BUG FIX: the second request must carry components2 (with comp2); the
  // original passed the first request's component list, leaving comp2 unused.
  LockRequest req2=new LockRequest(components2,"me","localhost");
  txnHandler.lock(req2);
  // Release the first (blocking) lock and let the cleaner pass again.
  txnHandler.unlock(new UnlockRequest(res.getLockid()));
  looped.set(false);
  while (!looped.get()) {
    Thread.sleep(100);
  }
  stopThread();
  Thread.sleep(200);
  rsp=txnHandler.showCompact(new ShowCompactRequest());
  compacts=rsp.getCompacts();
  Assert.assertEquals(1,compacts.size());
  Assert.assertTrue(TxnStore.SUCCEEDED_RESPONSE.equals(rsp.getCompacts().get(0).getState()));
}
InternalCallVerifier EqualityVerifier
/**
 * A shared-write lock on the partition held while the cleaner runs must
 * block cleaning: the compaction stays in "ready for cleaning".
 */
@Test public void blockedByLockPartition() throws Exception {
Table t=newTable("default","bblp",true);
Partition p=newPartition(t,"today");
addBaseFile(t,p,20L,20);
addDeltaFile(t,p,21L,22L,2);
addDeltaFile(t,p,23L,24L,2);
addDeltaFile(t,p,21L,24L,4);
burnThroughTransactions(25);
CompactionRequest rqst=new CompactionRequest("default","bblp",CompactionType.MINOR);
rqst.setPartitionname("ds=today");
txnHandler.compact(rqst);
// Simulate a worker claiming and finishing the compaction.
CompactionInfo ci=txnHandler.findNextToCompact("fred");
txnHandler.markCompacted(ci);
txnHandler.setRunAs(ci.id,System.getProperty("user.name"));
// Take a partition-level write lock BEFORE the cleaner runs; never released.
LockComponent comp=new LockComponent(LockType.SHARED_WRITE,LockLevel.PARTITION,"default");
comp.setTablename("bblp");
comp.setPartitionname("ds=today");
List components=new ArrayList(1);
components.add(comp);
LockRequest req=new LockRequest(components,"me","localhost");
LockResponse res=txnHandler.lock(req);
startCleaner();
// The lock must have prevented cleanup: still "ready for cleaning".
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List compacts=rsp.getCompacts();
Assert.assertEquals(1,compacts.size());
Assert.assertEquals("ready for cleaning",compacts.get(0).getState());
Assert.assertEquals("bblp",compacts.get(0).getTablename());
Assert.assertEquals("ds=today",compacts.get(0).getPartitionname());
Assert.assertEquals(CompactionType.MINOR,compacts.get(0).getType());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * If the partition is dropped between compaction and cleaning, the cleaner
 * must still mark the compaction SUCCEEDED rather than erroring out.
 */
@Test public void droppedPartition() throws Exception {
Table t=newTable("default","dp",true);
Partition p=newPartition(t,"today");
addDeltaFile(t,p,1L,22L,22);
addDeltaFile(t,p,23L,24L,2);
addBaseFile(t,p,25L,25);
burnThroughTransactions(25);
CompactionRequest rqst=new CompactionRequest("default","dp",CompactionType.MAJOR);
rqst.setPartitionname("ds=today");
txnHandler.compact(rqst);
// Simulate a worker claiming and finishing the compaction.
CompactionInfo ci=txnHandler.findNextToCompact("fred");
txnHandler.markCompacted(ci);
txnHandler.setRunAs(ci.id,System.getProperty("user.name"));
// Drop the partition before the cleaner gets to it.
ms.dropPartition("default","dp",Collections.singletonList("today"),true);
startCleaner();
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
Assert.assertEquals(1,rsp.getCompactsSize());
Assert.assertTrue(TxnStore.SUCCEEDED_RESPONSE.equals(rsp.getCompacts().get(0).getState()));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Cleaner behavior after a major compaction of a partition that DID have a
 * pre-existing base: the old base and both deltas are removed, only the new
 * base_25 remains, and the compaction ends SUCCEEDED.
 */
@Test public void cleanupAfterMajorPartitionCompaction() throws Exception {
Table t=newTable("default","campc",true);
Partition p=newPartition(t,"today");
// Old base at txn 20, two deltas, and the compacted base at txn 25.
addBaseFile(t,p,20L,20);
addDeltaFile(t,p,21L,22L,2);
addDeltaFile(t,p,23L,24L,2);
addBaseFile(t,p,25L,25);
burnThroughTransactions(25);
CompactionRequest rqst=new CompactionRequest("default","campc",CompactionType.MAJOR);
rqst.setPartitionname("ds=today");
txnHandler.compact(rqst);
// Simulate a worker claiming and finishing the compaction.
CompactionInfo ci=txnHandler.findNextToCompact("fred");
txnHandler.markCompacted(ci);
txnHandler.setRunAs(ci.id,System.getProperty("user.name"));
startCleaner();
// After cleaning: SUCCEEDED state and only base_25 left on disk.
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
Assert.assertEquals(1,rsp.getCompactsSize());
Assert.assertTrue(TxnStore.SUCCEEDED_RESPONSE.equals(rsp.getCompacts().get(0).getState()));
List paths=getDirectories(conf,t,p);
Assert.assertEquals(1,paths.size());
Assert.assertEquals("base_25",paths.get(0).getName());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Cleaner behavior after a major compaction of an unpartitioned table: the
 * old base and both deltas are removed, only base_25 remains, and the
 * compaction ends SUCCEEDED.
 */
@Test public void cleanupAfterMajorTableCompaction() throws Exception {
Table t=newTable("default","camtc",false);
// Old base at txn 20, two deltas, and the compacted base at txn 25.
addBaseFile(t,null,20L,20);
addDeltaFile(t,null,21L,22L,2);
addDeltaFile(t,null,23L,24L,2);
addBaseFile(t,null,25L,25);
burnThroughTransactions(25);
CompactionRequest rqst=new CompactionRequest("default","camtc",CompactionType.MAJOR);
txnHandler.compact(rqst);
// Simulate a worker claiming and finishing the compaction.
CompactionInfo ci=txnHandler.findNextToCompact("fred");
txnHandler.markCompacted(ci);
txnHandler.setRunAs(ci.id,System.getProperty("user.name"));
startCleaner();
// After cleaning: SUCCEEDED state and only base_25 left on disk.
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
Assert.assertEquals(1,rsp.getCompactsSize());
Assert.assertTrue(TxnStore.SUCCEEDED_RESPONSE.equals(rsp.getCompacts().get(0).getState()));
List paths=getDirectories(conf,t,null);
Assert.assertEquals(1,paths.size());
Assert.assertEquals("base_25",paths.get(0).getName());
}
APIUtilityVerifier BranchVerifier UtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Cleaner behavior after a MINOR compaction of an unpartitioned table: the
 * two narrow deltas are removed while the base and the merged delta (21-24)
 * survive.
 */
@Test public void cleanupAfterMinorTableCompaction() throws Exception {
Table t=newTable("default","camitc",false);
// Base, two narrow deltas, and the merged delta the minor compaction made.
addBaseFile(t,null,20L,20);
addDeltaFile(t,null,21L,22L,2);
addDeltaFile(t,null,23L,24L,2);
addDeltaFile(t,null,21L,24L,4);
burnThroughTransactions(25);
CompactionRequest rqst=new CompactionRequest("default","camitc",CompactionType.MINOR);
txnHandler.compact(rqst);
// Simulate a worker claiming and finishing the compaction.
CompactionInfo ci=txnHandler.findNextToCompact("fred");
txnHandler.markCompacted(ci);
txnHandler.setRunAs(ci.id,System.getProperty("user.name"));
startCleaner();
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
Assert.assertEquals(1,rsp.getCompactsSize());
Assert.assertTrue(TxnStore.SUCCEEDED_RESPONSE.equals(rsp.getCompacts().get(0).getState()));
// Exactly the base and the compacted delta remain, in either order.
List paths=getDirectories(conf,t,null);
Assert.assertEquals(2,paths.size());
boolean sawBase=false, sawDelta=false;
for ( Path p : paths) {
if (p.getName().equals("base_20")) sawBase=true;
 else if (p.getName().equals(makeDeltaDirName(21,24))) sawDelta=true;
 else Assert.fail("Unexpected file " + p.getName());
}
Assert.assertTrue(sawBase);
Assert.assertTrue(sawDelta);
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * If the table is dropped between compaction and cleaning, the cleaner must
 * still mark the compaction SUCCEEDED rather than erroring out.
 */
@Test public void droppedTable() throws Exception {
Table t=newTable("default","dt",false);
addDeltaFile(t,null,1L,22L,22);
addDeltaFile(t,null,23L,24L,2);
addBaseFile(t,null,25L,25);
burnThroughTransactions(25);
CompactionRequest rqst=new CompactionRequest("default","dt",CompactionType.MINOR);
txnHandler.compact(rqst);
// Simulate a worker claiming and finishing the compaction.
CompactionInfo ci=txnHandler.findNextToCompact("fred");
txnHandler.markCompacted(ci);
txnHandler.setRunAs(ci.id,System.getProperty("user.name"));
// Drop the table before the cleaner gets to it.
ms.dropTable("default","dt");
startCleaner();
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
Assert.assertEquals(1,rsp.getCompactsSize());
Assert.assertTrue(TxnStore.SUCCEEDED_RESPONSE.equals(rsp.getCompacts().get(0).getState()));
}
APIUtilityVerifier BranchVerifier UtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Cleaner behavior after a MINOR compaction of a partition: the two narrow
 * deltas are removed while the base and the compacted delta (21-24) survive.
 */
@Test public void cleanupAfterMinorPartitionCompaction() throws Exception {
Table t=newTable("default","camipc",true);
Partition p=newPartition(t,"today");
// Base, two narrow deltas, and the merged delta the minor compaction made.
addBaseFile(t,p,20L,20);
addDeltaFile(t,p,21L,22L,2);
addDeltaFile(t,p,23L,24L,2);
addDeltaFile(t,p,21L,24L,4);
burnThroughTransactions(25);
CompactionRequest rqst=new CompactionRequest("default","camipc",CompactionType.MINOR);
rqst.setPartitionname("ds=today");
txnHandler.compact(rqst);
// Simulate a worker claiming and finishing the compaction.
CompactionInfo ci=txnHandler.findNextToCompact("fred");
txnHandler.markCompacted(ci);
txnHandler.setRunAs(ci.id,System.getProperty("user.name"));
startCleaner();
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
Assert.assertEquals(1,rsp.getCompactsSize());
Assert.assertTrue(TxnStore.SUCCEEDED_RESPONSE.equals(rsp.getCompacts().get(0).getState()));
// Exactly the base and the compacted delta remain, in either order.
List paths=getDirectories(conf,t,p);
Assert.assertEquals(2,paths.size());
boolean sawBase=false, sawDelta=false;
for ( Path path : paths) {
if (path.getName().equals("base_20")) sawBase=true;
 else if (path.getName().equals(makeDeltaDirNameCompacted(21,24))) sawDelta=true;
 else Assert.fail("Unexpected file " + path.getName());
}
Assert.assertTrue(sawBase);
Assert.assertTrue(sawDelta);
}
Class: org.apache.hadoop.hive.ql.txn.compactor.TestCompactor APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * An UPDATE over a dynamically partitioned ACID table should make the
 * Initiator queue one compaction per affected partition (here: both).
 */
@Test public void dynamicPartitioningUpdate() throws Exception {
String tblName="udpct";
List colNames=Arrays.asList("a","b");
executeStatementOnDriver("drop table if exists " + tblName,driver);
executeStatementOnDriver("CREATE TABLE " + tblName + "(a INT, b STRING) "+ " PARTITIONED BY(ds string)"+ " CLUSTERED BY(a) INTO 2 BUCKETS"+ " STORED AS ORC TBLPROPERTIES ('transactional'='true')",driver);
executeStatementOnDriver("insert into " + tblName + " partition (ds) values (1, 'fred', "+ "'today'), (2, 'wilma', 'yesterday')",driver);
// The update touches rows in both partitions, creating deltas in each.
executeStatementOnDriver("update " + tblName + " set b = 'barney'",driver);
// Run a single Initiator pass synchronously (stop flag pre-set to true).
Initiator initiator=new Initiator();
initiator.setThreadId((int)initiator.getId());
// Threshold of 1 delta so the update's deltas trigger compaction.
conf.setIntVar(HiveConf.ConfVars.HIVE_COMPACTOR_DELTA_NUM_THRESHOLD,1);
initiator.setHiveConf(conf);
AtomicBoolean stop=new AtomicBoolean();
stop.set(true);
initiator.init(stop,new AtomicBoolean());
initiator.run();
TxnStore txnHandler=TxnUtils.getTxnStore(conf);
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List compacts=rsp.getCompacts();
Assert.assertEquals(2,compacts.size());
SortedSet partNames=new TreeSet();
for (int i=0; i < compacts.size(); i++) {
Assert.assertEquals("default",compacts.get(i).getDbname());
Assert.assertEquals(tblName,compacts.get(i).getTablename());
Assert.assertEquals("initiated",compacts.get(i).getState());
partNames.add(compacts.get(i).getPartitionname());
}
// Sorted set gives a deterministic partition order for the asserts.
List names=new ArrayList(partNames);
Assert.assertEquals("ds=today",names.get(0));
Assert.assertEquals("ds=yesterday",names.get(1));
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * A DELETE over a dynamically partitioned ACID table should make the
 * Initiator queue a compaction only for the partition it touched
 * (the update+delete both hit only ds=today).
 */
@Test public void dynamicPartitioningDelete() throws Exception {
String tblName="ddpct";
List colNames=Arrays.asList("a","b");
executeStatementOnDriver("drop table if exists " + tblName,driver);
executeStatementOnDriver("CREATE TABLE " + tblName + "(a INT, b STRING) "+ " PARTITIONED BY(ds string)"+ " CLUSTERED BY(a) INTO 2 BUCKETS"+ " STORED AS ORC TBLPROPERTIES ('transactional'='true')",driver);
executeStatementOnDriver("insert into " + tblName + " partition (ds) values (1, 'fred', "+ "'today'), (2, 'wilma', 'yesterday')",driver);
// Both statements below affect only the row in partition ds=today.
executeStatementOnDriver("update " + tblName + " set b = 'fred' where a = 1",driver);
executeStatementOnDriver("delete from " + tblName + " where b = 'fred'",driver);
// Run a single Initiator pass synchronously (stop flag pre-set to true).
Initiator initiator=new Initiator();
initiator.setThreadId((int)initiator.getId());
// Threshold of 2 deltas: only ds=today (update + delete) crosses it.
conf.setIntVar(HiveConf.ConfVars.HIVE_COMPACTOR_DELTA_NUM_THRESHOLD,2);
initiator.setHiveConf(conf);
AtomicBoolean stop=new AtomicBoolean();
stop.set(true);
initiator.init(stop,new AtomicBoolean());
initiator.run();
TxnStore txnHandler=TxnUtils.getTxnStore(conf);
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List compacts=rsp.getCompacts();
Assert.assertEquals(1,compacts.size());
SortedSet partNames=new TreeSet();
for (int i=0; i < compacts.size(); i++) {
Assert.assertEquals("default",compacts.get(i).getDbname());
Assert.assertEquals(tblName,compacts.get(i).getTablename());
Assert.assertEquals("initiated",compacts.get(i).getState());
partNames.add(compacts.get(i).getPartitionname());
}
List names=new ArrayList(partNames);
Assert.assertEquals("ds=today",names.get(0));
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * A dynamic-partition INSERT with the delta threshold set to 0 should make
 * the Initiator queue one compaction per created partition (both of them).
 */
@Test public void dynamicPartitioningInsert() throws Exception {
String tblName="dpct";
List colNames=Arrays.asList("a","b");
executeStatementOnDriver("drop table if exists " + tblName,driver);
executeStatementOnDriver("CREATE TABLE " + tblName + "(a INT, b STRING) "+ " PARTITIONED BY(ds string)"+ " CLUSTERED BY(a) INTO 2 BUCKETS"+ " STORED AS ORC TBLPROPERTIES ('transactional'='true')",driver);
executeStatementOnDriver("insert into " + tblName + " partition (ds) values (1, 'fred', "+ "'today'), (2, 'wilma', 'yesterday')",driver);
// Run a single Initiator pass synchronously (stop flag pre-set to true).
Initiator initiator=new Initiator();
initiator.setThreadId((int)initiator.getId());
// Threshold 0: any delta at all is enough to trigger compaction.
conf.setIntVar(HiveConf.ConfVars.HIVE_COMPACTOR_DELTA_NUM_THRESHOLD,0);
initiator.setHiveConf(conf);
AtomicBoolean stop=new AtomicBoolean();
stop.set(true);
initiator.init(stop,new AtomicBoolean());
initiator.run();
TxnStore txnHandler=TxnUtils.getTxnStore(conf);
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List compacts=rsp.getCompacts();
Assert.assertEquals(2,compacts.size());
SortedSet partNames=new TreeSet();
for (int i=0; i < compacts.size(); i++) {
Assert.assertEquals("default",compacts.get(i).getDbname());
Assert.assertEquals(tblName,compacts.get(i).getTablename());
Assert.assertEquals("initiated",compacts.get(i).getState());
partNames.add(compacts.get(i).getPartitionname());
}
// Sorted set gives a deterministic partition order for the asserts.
List names=new ArrayList(partNames);
Assert.assertEquals("ds=today",names.get(0));
Assert.assertEquals("ds=yesterday",names.get(1));
}
APIUtilityVerifier BranchVerifier UtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier PublicFieldVerifier HybridVerifier
/**
 * After each major compaction, stats need to be updated on each column of the
 * table/partition which previously had stats.
 * 1. create a bucketed ORC backed table (Orc is currently required by ACID)
 * 2. populate 2 partitions with data
 * 3. compute stats
 * 4. insert some data into the table using StreamingAPI
 * 5. Trigger major compaction (which should update stats)
 * 6. check that stats have been updated
 * @throws Exception
 * TODO:
 * 2. add non-partitioned test
 * 4. add a test with sorted table?
 */
@Test public void testStatsAfterCompactionPartTbl() throws Exception {
// --- Step 1-2: create the ACID table plus a staging table and load data ---
String tblName="compaction_test";
String tblNameStg=tblName + "_stg";
List colNames=Arrays.asList("a","b");
executeStatementOnDriver("drop table if exists " + tblName,driver);
executeStatementOnDriver("drop table if exists " + tblNameStg,driver);
executeStatementOnDriver("CREATE TABLE " + tblName + "(a INT, b STRING) "+ " PARTITIONED BY(bkt INT)"+ " CLUSTERED BY(a) INTO 4 BUCKETS"+ " STORED AS ORC TBLPROPERTIES ('transactional'='true')",driver);
executeStatementOnDriver("CREATE EXTERNAL TABLE " + tblNameStg + "(a INT, b STRING)"+ " ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\t' LINES TERMINATED BY '\\n'"+ " STORED AS TEXTFILE"+ " LOCATION '"+ stagingFolder.newFolder().toURI().getPath()+ "'"+ " TBLPROPERTIES ('transactional'='true')",driver);
executeStatementOnDriver("load data local inpath '" + BASIC_FILE_NAME + "' overwrite into table "+ tblNameStg,driver);
execSelectAndDumpData("select * from " + tblNameStg,driver,"Dumping data for " + tblNameStg + " after load:");
// Split the staged rows between the two partitions on the value of 'a'.
executeStatementOnDriver("FROM " + tblNameStg + " INSERT INTO TABLE "+ tblName+ " PARTITION(bkt=0) "+ "SELECT a, b where a < 2",driver);
executeStatementOnDriver("FROM " + tblNameStg + " INSERT INTO TABLE "+ tblName+ " PARTITION(bkt=1) "+ "SELECT a, b where a >= 2",driver);
execSelectAndDumpData("select * from " + tblName,driver,"Dumping data for " + tblName + " after load:");
// --- Step 3: gather column stats for both partitions ---
TxnStore txnHandler=TxnUtils.getTxnStore(conf);
CompactionInfo ci=new CompactionInfo("default",tblName,"bkt=0",CompactionType.MAJOR);
LOG.debug("List of stats columns before analyze Part1: " + txnHandler.findColumnsWithStats(ci));
Worker.StatsUpdater su=Worker.StatsUpdater.init(ci,colNames,conf,System.getProperty("user.name"));
su.gatherStats();
LOG.debug("List of stats columns after analyze Part1: " + txnHandler.findColumnsWithStats(ci));
CompactionInfo ciPart2=new CompactionInfo("default",tblName,"bkt=1",CompactionType.MAJOR);
LOG.debug("List of stats columns before analyze Part2: " + txnHandler.findColumnsWithStats(ci));
su=Worker.StatsUpdater.init(ciPart2,colNames,conf,System.getProperty("user.name"));
su.gatherStats();
LOG.debug("List of stats columns after analyze Part2: " + txnHandler.findColumnsWithStats(ci));
// Verify the pre-streaming stats for partition bkt=0.
// NOTE(review): generic type parameters appear stripped from this dump
// (e.g. "Map>"); presumably Map<String, List<ColumnStatisticsObj>> — verify
// against the original source.
Map> stats=msClient.getPartitionColumnStatistics(ci.dbname,ci.tableName,Arrays.asList(ci.partName),colNames);
List colStats=stats.get(ci.partName);
Assert.assertNotNull("No stats found for partition " + ci.partName,colStats);
Assert.assertEquals("Expected column 'a' at index 0","a",colStats.get(0).getColName());
Assert.assertEquals("Expected column 'b' at index 1","b",colStats.get(1).getColName());
LongColumnStatsData colAStats=colStats.get(0).getStatsData().getLongStats();
Assert.assertEquals("lowValue a",1,colAStats.getLowValue());
Assert.assertEquals("highValue a",1,colAStats.getHighValue());
Assert.assertEquals("numNulls a",0,colAStats.getNumNulls());
Assert.assertEquals("numNdv a",1,colAStats.getNumDVs());
StringColumnStatsData colBStats=colStats.get(1).getStatsData().getStringStats();
Assert.assertEquals("maxColLen b",3,colBStats.getMaxColLen());
Assert.assertEquals("avgColLen b",3.0,colBStats.getAvgColLen(),0.01);
Assert.assertEquals("numNulls b",0,colBStats.getNumNulls());
Assert.assertEquals("nunDVs",2,colBStats.getNumDVs());
// Snapshot bkt=1 stats so we can later assert they were NOT touched.
stats=msClient.getPartitionColumnStatistics(ciPart2.dbname,ciPart2.tableName,Arrays.asList(ciPart2.partName),colNames);
colStats=stats.get(ciPart2.partName);
LongColumnStatsData colAStatsPart2=colStats.get(0).getStatsData().getLongStats();
StringColumnStatsData colBStatsPart2=colStats.get(1).getStatsData().getStringStats();
// --- Step 4: stream six more rows into partition bkt=0 in two txns ---
HiveEndPoint endPt=new HiveEndPoint(null,ci.dbname,ci.tableName,Arrays.asList("0"));
DelimitedInputWriter writer=new DelimitedInputWriter(new String[]{"a","b"},",",endPt);
StreamingConnection connection=endPt.newConnection(true);
TransactionBatch txnBatch=connection.fetchTransactionBatch(2,writer);
txnBatch.beginNextTransaction();
Assert.assertEquals(TransactionBatch.TxnState.OPEN,txnBatch.getCurrentTransactionState());
txnBatch.write("50,Kiev".getBytes());
txnBatch.write("51,St. Petersburg".getBytes());
txnBatch.write("44,Boston".getBytes());
txnBatch.commit();
txnBatch.beginNextTransaction();
txnBatch.write("52,Tel Aviv".getBytes());
txnBatch.write("53,Atlantis".getBytes());
txnBatch.write("53,Boston".getBytes());
txnBatch.commit();
txnBatch.close();
connection.close();
execSelectAndDumpData("select * from " + ci.getFullTableName(),driver,ci.getFullTableName());
// --- Step 5: run a major compaction of bkt=0 via a single Worker pass ---
CompactionRequest rqst=new CompactionRequest(ci.dbname,ci.tableName,CompactionType.MAJOR);
rqst.setPartitionname(ci.partName);
txnHandler.compact(rqst);
Worker t=new Worker();
t.setThreadId((int)t.getId());
t.setHiveConf(conf);
AtomicBoolean stop=new AtomicBoolean();
AtomicBoolean looped=new AtomicBoolean();
stop.set(true);
t.init(stop,looped);
t.run();
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List compacts=rsp.getCompacts();
if (1 != compacts.size()) {
Assert.fail("Expecting 1 file and found " + compacts.size() + " files "+ compacts.toString());
}
Assert.assertEquals("ready for cleaning",compacts.get(0).getState());
// --- Step 6: stats for bkt=0 must reflect the streamed rows ---
stats=msClient.getPartitionColumnStatistics(ci.dbname,ci.tableName,Arrays.asList(ci.partName),colNames);
colStats=stats.get(ci.partName);
Assert.assertNotNull("No stats found for partition " + ci.partName,colStats);
Assert.assertEquals("Expected column 'a' at index 0","a",colStats.get(0).getColName());
Assert.assertEquals("Expected column 'b' at index 1","b",colStats.get(1).getColName());
colAStats=colStats.get(0).getStatsData().getLongStats();
Assert.assertEquals("lowValue a",1,colAStats.getLowValue());
Assert.assertEquals("highValue a",53,colAStats.getHighValue());
Assert.assertEquals("numNulls a",0,colAStats.getNumNulls());
Assert.assertEquals("numNdv a",6,colAStats.getNumDVs());
colBStats=colStats.get(1).getStatsData().getStringStats();
Assert.assertEquals("maxColLen b",14,colBStats.getMaxColLen());
// NOTE(review): both sides are cast to long, so this only checks the
// truncated average (6 == 6) — presumably intentional to sidestep float
// comparison, but a delta-based double assert would be stricter.
Assert.assertEquals("avgColLen b",(long)6.1111111111,(long)colBStats.getAvgColLen());
Assert.assertEquals("numNulls b",0,colBStats.getNumNulls());
Assert.assertEquals("nunDVs",10,colBStats.getNumDVs());
// Partition bkt=1 was not compacted, so its stats must be unchanged.
stats=msClient.getPartitionColumnStatistics(ciPart2.dbname,ciPart2.tableName,Arrays.asList(ciPart2.partName),colNames);
colStats=stats.get(ciPart2.partName);
Assert.assertEquals("Expected stats for " + ciPart2.partName + " to stay the same",colAStatsPart2,colStats.get(0).getStatsData().getLongStats());
Assert.assertEquals("Expected stats for " + ciPart2.partName + " to stay the same",colBStatsPart2,colStats.get(1).getStatsData().getStringStats());
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Simple schema evolution: ADD COLUMNS on a dynamically partitioned ACID
 * table. Pre-evolution rows must read back with NULL for the new column,
 * post-evolution rows carry real values, and the Initiator still queues one
 * compaction per partition.
 * @throws Exception
 */
@Test public void schemaEvolutionAddColDynamicPartitioningInsert() throws Exception {
String tblName="dpct";
List colNames=Arrays.asList("a","b");
executeStatementOnDriver("drop table if exists " + tblName,driver);
executeStatementOnDriver("CREATE TABLE " + tblName + "(a INT, b STRING) "+ " PARTITIONED BY(ds string)"+ " CLUSTERED BY(a) INTO 2 BUCKETS"+ " STORED AS ORC TBLPROPERTIES ('transactional'='true')",driver);
executeStatementOnDriver("insert into " + tblName + " partition (ds) values (1, 'fred', "+ "'today'), (2, 'wilma', 'yesterday')",driver);
// Evolve the schema: add column c. Existing rows should read c as NULL.
executeStatementOnDriver("ALTER TABLE " + tblName + " ADD COLUMNS(c int)",driver);
executeStatementOnDriver("SELECT * FROM " + tblName + " ORDER BY a",driver);
ArrayList valuesReadFromHiveDriver=new ArrayList();
driver.getResults(valuesReadFromHiveDriver);
Assert.assertEquals(2,valuesReadFromHiveDriver.size());
Assert.assertEquals("1\tfred\tNULL\ttoday",valuesReadFromHiveDriver.get(0));
Assert.assertEquals("2\twilma\tNULL\tyesterday",valuesReadFromHiveDriver.get(1));
// Insert rows that populate the new column, across two new partitions and
// one existing partition.
executeStatementOnDriver("insert into " + tblName + " partition (ds) values "+ "(3, 'mark', 1900, 'soon'), (4, 'douglas', 1901, 'last_century'), "+ "(5, 'doc', 1902, 'yesterday')",driver);
executeStatementOnDriver("SELECT * FROM " + tblName + " ORDER BY a",driver);
valuesReadFromHiveDriver=new ArrayList();
driver.getResults(valuesReadFromHiveDriver);
Assert.assertEquals(5,valuesReadFromHiveDriver.size());
Assert.assertEquals("1\tfred\tNULL\ttoday",valuesReadFromHiveDriver.get(0));
Assert.assertEquals("2\twilma\tNULL\tyesterday",valuesReadFromHiveDriver.get(1));
Assert.assertEquals("3\tmark\t1900\tsoon",valuesReadFromHiveDriver.get(2));
Assert.assertEquals("4\tdouglas\t1901\tlast_century",valuesReadFromHiveDriver.get(3));
Assert.assertEquals("5\tdoc\t1902\tyesterday",valuesReadFromHiveDriver.get(4));
// Run a single Initiator pass synchronously (stop flag pre-set to true);
// threshold 0 means any delta triggers compaction.
Initiator initiator=new Initiator();
initiator.setThreadId((int)initiator.getId());
conf.setIntVar(HiveConf.ConfVars.HIVE_COMPACTOR_DELTA_NUM_THRESHOLD,0);
initiator.setHiveConf(conf);
AtomicBoolean stop=new AtomicBoolean();
stop.set(true);
initiator.init(stop,new AtomicBoolean());
initiator.run();
TxnStore txnHandler=TxnUtils.getTxnStore(conf);
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List compacts=rsp.getCompacts();
Assert.assertEquals(4,compacts.size());
SortedSet partNames=new TreeSet();
for (int i=0; i < compacts.size(); i++) {
Assert.assertEquals("default",compacts.get(i).getDbname());
Assert.assertEquals(tblName,compacts.get(i).getTablename());
Assert.assertEquals("initiated",compacts.get(i).getState());
partNames.add(compacts.get(i).getPartitionname());
}
// Sorted set gives a deterministic partition order for the asserts.
List names=new ArrayList(partNames);
Assert.assertEquals("ds=last_century",names.get(0));
Assert.assertEquals("ds=soon",names.get(1));
Assert.assertEquals("ds=today",names.get(2));
Assert.assertEquals("ds=yesterday",names.get(3));
// The data must still read back correctly after initiating compactions.
executeStatementOnDriver("SELECT * FROM " + tblName + " ORDER BY a",driver);
valuesReadFromHiveDriver=new ArrayList();
driver.getResults(valuesReadFromHiveDriver);
Assert.assertEquals(5,valuesReadFromHiveDriver.size());
Assert.assertEquals("1\tfred\tNULL\ttoday",valuesReadFromHiveDriver.get(0));
Assert.assertEquals("2\twilma\tNULL\tyesterday",valuesReadFromHiveDriver.get(1));
Assert.assertEquals("3\tmark\t1900\tsoon",valuesReadFromHiveDriver.get(2));
Assert.assertEquals("4\tdouglas\t1901\tlast_century",valuesReadFromHiveDriver.get(3));
Assert.assertEquals("5\tdoc\t1902\tyesterday",valuesReadFromHiveDriver.get(4));
}
APIUtilityVerifier BranchVerifier UtilityVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies that a MAJOR compaction run while a streaming-ingest connection is open
 * collapses the committed deltas into a single base directory ("base_0000004")
 * containing the expected transactions.
 */
@Test public void majorCompactWhileStreaming() throws Exception {
  String dbName = "default";
  String tblName = "cws";
  String columnNamesProperty = "a,b";
  String columnTypesProperty = "int:string";
  executeStatementOnDriver("drop table if exists " + tblName, driver);
  executeStatementOnDriver("CREATE TABLE " + tblName + "(a INT, b STRING) "
      + " CLUSTERED BY(a) INTO 1 BUCKETS"
      + " STORED AS ORC TBLPROPERTIES ('transactional'='true') ", driver);
  HiveEndPoint endPt = new HiveEndPoint(null, dbName, tblName, null);
  DelimitedInputWriter writer = new DelimitedInputWriter(new String[]{"a", "b"}, ",", endPt);
  StreamingConnection connection = endPt.newConnection(false);
  try {
    // Two closed (committed) batches, then one left open so the compactor must
    // stop short of the still-open transactions.
    for (int i = 0; i < 2; i++) {
      writeBatch(connection, writer, false);
    }
    writeBatch(connection, writer, true);
    TxnStore txnHandler = TxnUtils.getTxnStore(conf);
    txnHandler.compact(new CompactionRequest(dbName, tblName, CompactionType.MAJOR));
    // Run a single Worker pass synchronously (stop flag pre-set to true).
    Worker t = new Worker();
    t.setThreadId((int) t.getId());
    t.setHiveConf(conf);
    AtomicBoolean stop = new AtomicBoolean(true);
    AtomicBoolean looped = new AtomicBoolean();
    t.init(stop, looped);
    t.run();
    IMetaStoreClient msClient = new HiveMetaStoreClient(conf);
    Table table = msClient.getTable(dbName, tblName);
    FileSystem fs = FileSystem.get(conf);
    FileStatus[] stat =
        fs.listStatus(new Path(table.getSd().getLocation()), AcidUtils.baseFileFilter);
    if (1 != stat.length) {
      Assert.fail("Expecting 1 file \"base_0000004\" and found " + stat.length + " files "
          + Arrays.toString(stat));
    }
    String name = stat[0].getPath().getName();
    // Expected value first per JUnit convention (was reversed, giving a confusing
    // failure message).
    Assert.assertEquals("base_0000004", name);
    checkExpectedTxnsPresent(stat[0].getPath(), null, columnNamesProperty, columnTypesProperty,
        0, 1L, 4L);
  } finally {
    connection.close();
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Schema evolution on a dynamically partitioned ACID table: add a column, insert and
 * update across partitions, then verify the Initiator schedules a compaction for each
 * affected partition and the query results remain correct afterwards.
 */
@Test public void schemaEvolutionAddColDynamicPartitioningUpdate() throws Exception {
  String tblName = "udpct";
  executeStatementOnDriver("drop table if exists " + tblName, driver);
  executeStatementOnDriver("CREATE TABLE " + tblName + "(a INT, b STRING) "
      + " PARTITIONED BY(ds string)" + " CLUSTERED BY(a) INTO 2 BUCKETS"
      + " STORED AS ORC TBLPROPERTIES ('transactional'='true')", driver);
  executeStatementOnDriver("insert into " + tblName + " partition (ds) values (1, 'fred', "
      + "'today'), (2, 'wilma', 'yesterday')", driver);
  executeStatementOnDriver("update " + tblName + " set b = 'barney'", driver);
  executeStatementOnDriver("SELECT * FROM " + tblName + " ORDER BY a", driver);
  ArrayList<String> valuesReadFromHiveDriver = new ArrayList<>();
  driver.getResults(valuesReadFromHiveDriver);
  Assert.assertEquals(2, valuesReadFromHiveDriver.size());
  Assert.assertEquals("1\tbarney\ttoday", valuesReadFromHiveDriver.get(0));
  Assert.assertEquals("2\tbarney\tyesterday", valuesReadFromHiveDriver.get(1));
  // Add a column; existing rows must read NULL for it.
  executeStatementOnDriver("ALTER TABLE " + tblName + " ADD COLUMNS(c int)", driver);
  executeStatementOnDriver("SELECT * FROM " + tblName + " ORDER BY a", driver);
  valuesReadFromHiveDriver = new ArrayList<>();
  driver.getResults(valuesReadFromHiveDriver);
  Assert.assertEquals(2, valuesReadFromHiveDriver.size());
  Assert.assertEquals("1\tbarney\tNULL\ttoday", valuesReadFromHiveDriver.get(0));
  Assert.assertEquals("2\tbarney\tNULL\tyesterday", valuesReadFromHiveDriver.get(1));
  executeStatementOnDriver("insert into " + tblName + " partition (ds) values "
      + "(3, 'mark', 1900, 'soon'), (4, 'douglas', 1901, 'last_century'), "
      + "(5, 'doc', 1902, 'yesterday')", driver);
  executeStatementOnDriver("SELECT * FROM " + tblName + " ORDER BY a", driver);
  valuesReadFromHiveDriver = new ArrayList<>();
  driver.getResults(valuesReadFromHiveDriver);
  Assert.assertEquals(5, valuesReadFromHiveDriver.size());
  Assert.assertEquals("1\tbarney\tNULL\ttoday", valuesReadFromHiveDriver.get(0));
  Assert.assertEquals("2\tbarney\tNULL\tyesterday", valuesReadFromHiveDriver.get(1));
  Assert.assertEquals("3\tmark\t1900\tsoon", valuesReadFromHiveDriver.get(2));
  Assert.assertEquals("4\tdouglas\t1901\tlast_century", valuesReadFromHiveDriver.get(3));
  Assert.assertEquals("5\tdoc\t1902\tyesterday", valuesReadFromHiveDriver.get(4));
  // Update the new column everywhere, touching all four partitions.
  executeStatementOnDriver("update " + tblName + " set c = 2000", driver);
  executeStatementOnDriver("SELECT * FROM " + tblName + " ORDER BY a", driver);
  valuesReadFromHiveDriver = new ArrayList<>();
  driver.getResults(valuesReadFromHiveDriver);
  Assert.assertEquals(5, valuesReadFromHiveDriver.size());
  Assert.assertEquals("1\tbarney\t2000\ttoday", valuesReadFromHiveDriver.get(0));
  Assert.assertEquals("2\tbarney\t2000\tyesterday", valuesReadFromHiveDriver.get(1));
  Assert.assertEquals("3\tmark\t2000\tsoon", valuesReadFromHiveDriver.get(2));
  Assert.assertEquals("4\tdouglas\t2000\tlast_century", valuesReadFromHiveDriver.get(3));
  Assert.assertEquals("5\tdoc\t2000\tyesterday", valuesReadFromHiveDriver.get(4));
  // Run a single Initiator pass; threshold of 1 delta makes every partition eligible.
  Initiator initiator = new Initiator();
  initiator.setThreadId((int) initiator.getId());
  conf.setIntVar(HiveConf.ConfVars.HIVE_COMPACTOR_DELTA_NUM_THRESHOLD, 1);
  initiator.setHiveConf(conf);
  AtomicBoolean stop = new AtomicBoolean();
  stop.set(true);
  initiator.init(stop, new AtomicBoolean());
  initiator.run();
  TxnStore txnHandler = TxnUtils.getTxnStore(conf);
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(4, compacts.size());
  SortedSet<String> partNames = new TreeSet<>();
  for (ShowCompactResponseElement compact : compacts) {
    Assert.assertEquals("default", compact.getDbname());
    Assert.assertEquals(tblName, compact.getTablename());
    Assert.assertEquals("initiated", compact.getState());
    partNames.add(compact.getPartitionname());
  }
  List<String> names = new ArrayList<>(partNames);
  Assert.assertEquals("ds=last_century", names.get(0));
  Assert.assertEquals("ds=soon", names.get(1));
  Assert.assertEquals("ds=today", names.get(2));
  Assert.assertEquals("ds=yesterday", names.get(3));
  // Data must be unchanged after initiating compactions.
  executeStatementOnDriver("SELECT * FROM " + tblName + " ORDER BY a", driver);
  valuesReadFromHiveDriver = new ArrayList<>();
  driver.getResults(valuesReadFromHiveDriver);
  Assert.assertEquals(5, valuesReadFromHiveDriver.size());
  Assert.assertEquals("1\tbarney\t2000\ttoday", valuesReadFromHiveDriver.get(0));
  Assert.assertEquals("2\tbarney\t2000\tyesterday", valuesReadFromHiveDriver.get(1));
  Assert.assertEquals("3\tmark\t2000\tsoon", valuesReadFromHiveDriver.get(2));
  Assert.assertEquals("4\tdouglas\t2000\tlast_century", valuesReadFromHiveDriver.get(3));
  Assert.assertEquals("5\tdoc\t2000\tyesterday", valuesReadFromHiveDriver.get(4));
}
Class: org.apache.hadoop.hive.ql.txn.compactor.TestInitiator InternalCallVerifier EqualityVerifier
/**
 * A compaction claimed by a worker on an unreachable host must revert to "initiated"
 * once the worker timeout elapses and the Initiator runs.
 */
@Test public void recoverFailedRemoteWorkers() throws Exception {
  newTable("default", "rfrw1", false);
  CompactionRequest rqst = new CompactionRequest("default", "rfrw1", CompactionType.MINOR);
  txnHandler.compact(rqst);
  // Simulate a remote worker grabbing the job and then dying.
  txnHandler.findNextToCompact("nosuchhost-193892");
  conf.setTimeVar(HiveConf.ConfVars.HIVE_COMPACTOR_WORKER_TIMEOUT, 1L, TimeUnit.MILLISECONDS);
  startInitiator();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(1, compacts.size());
  Assert.assertEquals("initiated", compacts.get(0).getState());
}
InternalCallVerifier EqualityVerifier
/**
 * A table with more deltas than the delta-count threshold (but a low delta/base size
 * ratio) should get a MINOR compaction scheduled.
 */
@Test public void compactTableTooManyDeltas() throws Exception {
  Table t = newTable("default", "cttmd", false);
  addBaseFile(t, null, 200L, 200);
  // Eleven tiny deltas: over the count threshold, under the size-pct threshold.
  addDeltaFile(t, null, 201L, 201L, 1);
  addDeltaFile(t, null, 202L, 202L, 1);
  addDeltaFile(t, null, 203L, 203L, 1);
  addDeltaFile(t, null, 204L, 204L, 1);
  addDeltaFile(t, null, 205L, 205L, 1);
  addDeltaFile(t, null, 206L, 206L, 1);
  addDeltaFile(t, null, 207L, 207L, 1);
  addDeltaFile(t, null, 208L, 208L, 1);
  addDeltaFile(t, null, 209L, 209L, 1);
  addDeltaFile(t, null, 210L, 210L, 1);
  addDeltaFile(t, null, 211L, 211L, 1);
  burnThroughTransactions(210);
  long txnid = openTxn();
  LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.TABLE, "default");
  comp.setTablename("cttmd");
  List<LockComponent> components = new ArrayList<>(1);
  components.add(comp);
  LockRequest req = new LockRequest(components, "me", "localhost");
  req.setTxnid(txnid);
  txnHandler.lock(req);
  txnHandler.commitTxn(new CommitTxnRequest(txnid));
  startInitiator();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(1, compacts.size());
  Assert.assertEquals("initiated", compacts.get(0).getState());
  Assert.assertEquals("cttmd", compacts.get(0).getTablename());
  Assert.assertEquals(CompactionType.MINOR, compacts.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/**
 * A partition with many deltas and no base file should be scheduled for a MAJOR
 * compaction (one large delta acts as the effective base).
 */
@Test public void enoughDeltasNoBase() throws Exception {
  Table t = newTable("default", "ednb", true);
  Partition p = newPartition(t, "today");
  // First delta is large; no base file exists for this partition.
  addDeltaFile(t, p, 1L, 201L, 200);
  addDeltaFile(t, p, 202L, 202L, 1);
  addDeltaFile(t, p, 203L, 203L, 1);
  addDeltaFile(t, p, 204L, 204L, 1);
  addDeltaFile(t, p, 205L, 205L, 1);
  addDeltaFile(t, p, 206L, 206L, 1);
  addDeltaFile(t, p, 207L, 207L, 1);
  addDeltaFile(t, p, 208L, 208L, 1);
  addDeltaFile(t, p, 209L, 209L, 1);
  addDeltaFile(t, p, 210L, 210L, 1);
  addDeltaFile(t, p, 211L, 211L, 1);
  burnThroughTransactions(210);
  long txnid = openTxn();
  LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.PARTITION, "default");
  comp.setTablename("ednb");
  comp.setPartitionname("ds=today");
  List<LockComponent> components = new ArrayList<>(1);
  components.add(comp);
  LockRequest req = new LockRequest(components, "me", "localhost");
  req.setTxnid(txnid);
  txnHandler.lock(req);
  txnHandler.commitTxn(new CommitTxnRequest(txnid));
  startInitiator();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(1, compacts.size());
  Assert.assertEquals("initiated", compacts.get(0).getState());
  Assert.assertEquals("ednb", compacts.get(0).getTablename());
  Assert.assertEquals("ds=today", compacts.get(0).getPartitionname());
  Assert.assertEquals(CompactionType.MAJOR, compacts.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/**
 * If the table is dropped before the Initiator runs, no compaction must be scheduled.
 */
@Test public void dropTable() throws Exception {
  Table t = newTable("default", "dt", false);
  addBaseFile(t, null, 20L, 20);
  addDeltaFile(t, null, 21L, 22L, 2);
  addDeltaFile(t, null, 23L, 24L, 2);
  burnThroughTransactions(23);
  long txnid = openTxn();
  LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.PARTITION, "default");
  comp.setTablename("dt");
  List<LockComponent> components = new ArrayList<>(1);
  components.add(comp);
  LockRequest req = new LockRequest(components, "me", "localhost");
  req.setTxnid(txnid);
  txnHandler.lock(req);
  txnHandler.commitTxn(new CommitTxnRequest(txnid));
  // Drop the table between commit and the Initiator pass.
  ms.dropTable("default", "dt");
  startInitiator();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(0, compacts.size());
}
InternalCallVerifier EqualityVerifier
/**
 * A table-level write on a partitioned table (dynamic partitioning) must not by itself
 * trigger a compaction — only partition-level activity is considered.
 */
@Test public void noCompactTableDynamicPartitioning() throws Exception {
  Table t = newTable("default", "nctdp", true);
  Partition p = newPartition(t, "today");
  addBaseFile(t, p, 20L, 20);
  addDeltaFile(t, p, 21L, 22L, 2);
  addDeltaFile(t, p, 23L, 24L, 2);
  burnThroughTransactions(23);
  long txnid = openTxn();
  // TABLE-level lock: the dynamic-partitioning case the Initiator must skip.
  LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.TABLE, "default");
  comp.setTablename("nctdp");
  List<LockComponent> components = new ArrayList<>(1);
  components.add(comp);
  LockRequest req = new LockRequest(components, "me", "localhost");
  req.setTxnid(txnid);
  txnHandler.lock(req);
  txnHandler.commitTxn(new CommitTxnRequest(txnid));
  startInitiator();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(0, compacts.size());
}
EqualityVerifier
/**
 * Aborted transactions that wrote nothing should be purged by the Initiator even when
 * their number exceeds one abort batch; only the single non-empty abort remains open.
 */
@Test public void cleanEmptyAbortedTxns() throws Exception {
  newTable("default", "ceat", false);
  long txnid = openTxn();
  LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.TABLE, "default");
  comp.setTablename("ceat");
  List<LockComponent> components = new ArrayList<>(1);
  components.add(comp);
  LockRequest req = new LockRequest(components, "me", "localhost");
  req.setTxnid(txnid);
  txnHandler.lock(req);
  txnHandler.abortTxn(new AbortTxnRequest(txnid));
  // More empty aborted txns than one cleanup batch can hold.
  for (int i = 0; i < TxnStore.TIMED_OUT_TXN_ABORT_BATCH_SIZE + 50; i++) {
    txnid = openTxn();
    txnHandler.abortTxn(new AbortTxnRequest(txnid));
  }
  GetOpenTxnsResponse openTxns = txnHandler.getOpenTxns();
  Assert.assertEquals(TxnStore.TIMED_OUT_TXN_ABORT_BATCH_SIZE + 50 + 1,
      openTxns.getOpen_txnsSize());
  startInitiator();
  openTxns = txnHandler.getOpenTxns();
  // Only the aborted txn that actually locked a table survives cleanup.
  Assert.assertEquals(1, openTxns.getOpen_txnsSize());
}
InternalCallVerifier EqualityVerifier
/**
 * When both the delta-count (minor) and delta-size-percentage (major) thresholds are
 * exceeded, the Initiator must prefer a MAJOR compaction.
 */
@Test public void chooseMajorOverMinorWhenBothValid() throws Exception {
  Table t = newTable("default", "cmomwbv", false);
  addBaseFile(t, null, 200L, 200);
  // Eleven deltas (over the count threshold) whose combined size also exceeds
  // the size-percentage threshold relative to the base.
  addDeltaFile(t, null, 201L, 211L, 11);
  addDeltaFile(t, null, 212L, 222L, 11);
  addDeltaFile(t, null, 223L, 233L, 11);
  addDeltaFile(t, null, 234L, 244L, 11);
  addDeltaFile(t, null, 245L, 255L, 11);
  addDeltaFile(t, null, 256L, 266L, 11);
  addDeltaFile(t, null, 267L, 277L, 11);
  addDeltaFile(t, null, 278L, 288L, 11);
  addDeltaFile(t, null, 289L, 299L, 11);
  addDeltaFile(t, null, 300L, 310L, 11);
  addDeltaFile(t, null, 311L, 321L, 11);
  burnThroughTransactions(320);
  long txnid = openTxn();
  LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.TABLE, "default");
  comp.setTablename("cmomwbv");
  List<LockComponent> components = new ArrayList<>(1);
  components.add(comp);
  LockRequest req = new LockRequest(components, "me", "localhost");
  req.setTxnid(txnid);
  txnHandler.lock(req);
  txnHandler.commitTxn(new CommitTxnRequest(txnid));
  startInitiator();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(1, compacts.size());
  Assert.assertEquals("initiated", compacts.get(0).getState());
  Assert.assertEquals("cmomwbv", compacts.get(0).getTablename());
  Assert.assertEquals(CompactionType.MAJOR, compacts.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/**
 * If the partition is dropped before the Initiator runs, no compaction must be
 * scheduled for it.
 */
@Test public void dropPartition() throws Exception {
  Table t = newTable("default", "dp", true);
  Partition p = newPartition(t, "today");
  addBaseFile(t, p, 20L, 20);
  addDeltaFile(t, p, 21L, 22L, 2);
  addDeltaFile(t, p, 23L, 24L, 2);
  burnThroughTransactions(23);
  long txnid = openTxn();
  LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.PARTITION, "default");
  comp.setTablename("dp");
  comp.setPartitionname("ds=today");
  List<LockComponent> components = new ArrayList<>(1);
  components.add(comp);
  LockRequest req = new LockRequest(components, "me", "localhost");
  req.setTxnid(txnid);
  txnHandler.lock(req);
  txnHandler.commitTxn(new CommitTxnRequest(txnid));
  // Drop the partition between commit and the Initiator pass.
  ms.dropPartition("default", "dp", Collections.singletonList("today"), true);
  startInitiator();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(0, compacts.size());
}
InternalCallVerifier EqualityVerifier
/**
 * The Initiator must not enqueue a second compaction for a table that already has one
 * in "initiated" state, even if the aborted-txn threshold is exceeded again.
 */
@Test public void noCompactWhenCompactAlreadyScheduled() throws Exception {
  newTable("default", "ncwcas", false);
  HiveConf.setIntVar(conf, HiveConf.ConfVars.HIVE_COMPACTOR_ABORTEDTXN_THRESHOLD, 10);
  // Exceed the aborted-transaction threshold.
  for (int i = 0; i < 11; i++) {
    long txnid = openTxn();
    LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.TABLE, "default");
    comp.setTablename("ncwcas");
    List<LockComponent> components = new ArrayList<>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    req.setTxnid(txnid);
    txnHandler.lock(req);
    txnHandler.abortTxn(new AbortTxnRequest(txnid));
  }
  // Manually schedule a MAJOR compaction first.
  CompactionRequest rqst = new CompactionRequest("default", "ncwcas", CompactionType.MAJOR);
  txnHandler.compact(rqst);
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(1, compacts.size());
  Assert.assertEquals("initiated", compacts.get(0).getState());
  Assert.assertEquals("ncwcas", compacts.get(0).getTablename());
  startInitiator();
  // Still exactly one compaction after the Initiator runs.
  rsp = txnHandler.showCompact(new ShowCompactRequest());
  compacts = rsp.getCompacts();
  Assert.assertEquals(1, compacts.size());
  Assert.assertEquals("initiated", compacts.get(0).getState());
  Assert.assertEquals("ncwcas", compacts.get(0).getTablename());
  Assert.assertEquals(CompactionType.MAJOR, compacts.get(0).getType());
}
EqualityVerifier
/**
 * Two deltas are below the delta-count threshold and their size is small relative to
 * the base, so no compaction should be initiated.
 */
@Test public void noCompactTableNotEnoughDeltas() throws Exception {
  Table t = newTable("default", "nctned", false);
  addBaseFile(t, null, 200L, 200);
  addDeltaFile(t, null, 201L, 205L, 5);
  addDeltaFile(t, null, 206L, 211L, 6);
  burnThroughTransactions(210);
  long txnid = openTxn();
  LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.TABLE, "default");
  comp.setTablename("nctned");
  List<LockComponent> components = new ArrayList<>(1);
  components.add(comp);
  LockRequest req = new LockRequest(components, "me", "localhost");
  req.setTxnid(txnid);
  txnHandler.lock(req);
  txnHandler.commitTxn(new CommitTxnRequest(txnid));
  startInitiator();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  Assert.assertEquals(0, rsp.getCompactsSize());
}
InternalCallVerifier EqualityVerifier
/**
 * Delta data large relative to the partition's base file should trigger a MAJOR
 * compaction for that partition.
 */
@Test public void compactPartitionHighDeltaPct() throws Exception {
  Table t = newTable("default", "cphdp", true);
  Partition p = newPartition(t, "today");
  // 4 records of delta against a 20-record base exceeds the delta-pct threshold.
  addBaseFile(t, p, 20L, 20);
  addDeltaFile(t, p, 21L, 22L, 2);
  addDeltaFile(t, p, 23L, 24L, 2);
  burnThroughTransactions(23);
  long txnid = openTxn();
  LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.PARTITION, "default");
  comp.setTablename("cphdp");
  comp.setPartitionname("ds=today");
  List<LockComponent> components = new ArrayList<>(1);
  components.add(comp);
  LockRequest req = new LockRequest(components, "me", "localhost");
  req.setTxnid(txnid);
  txnHandler.lock(req);
  txnHandler.commitTxn(new CommitTxnRequest(txnid));
  startInitiator();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(1, compacts.size());
  Assert.assertEquals("initiated", compacts.get(0).getState());
  Assert.assertEquals("cphdp", compacts.get(0).getTablename());
  Assert.assertEquals("ds=today", compacts.get(0).getPartitionname());
  Assert.assertEquals(CompactionType.MAJOR, compacts.get(0).getType());
}
EqualityVerifier
/**
 * Delta data small relative to the base file (4 records vs a 50-record base) should
 * not trigger any compaction.
 */
@Test public void noCompactTableDeltaPctNotHighEnough() throws Exception {
  Table t = newTable("default", "nctdpnhe", false);
  addBaseFile(t, null, 50L, 50);
  addDeltaFile(t, null, 21L, 22L, 2);
  addDeltaFile(t, null, 23L, 24L, 2);
  burnThroughTransactions(53);
  long txnid = openTxn();
  LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.TABLE, "default");
  comp.setTablename("nctdpnhe");
  List<LockComponent> components = new ArrayList<>(1);
  components.add(comp);
  LockRequest req = new LockRequest(components, "me", "localhost");
  req.setTxnid(txnid);
  txnHandler.lock(req);
  txnHandler.commitTxn(new CommitTxnRequest(txnid));
  startInitiator();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  Assert.assertEquals(0, rsp.getCompactsSize());
}
EqualityVerifier
/**
 * Aborted transactions spread across many different partitions must not trigger a
 * compaction — the aborted-txn threshold is applied per partition, not per table.
 */
@Test public void noCompactOnManyDifferentPartitionAborts() throws Exception {
  Table t = newTable("default", "ncomdpa", true);
  for (int i = 0; i < 11; i++) {
    newPartition(t, "day-" + i);
  }
  HiveConf.setIntVar(conf, HiveConf.ConfVars.HIVE_COMPACTOR_ABORTEDTXN_THRESHOLD, 10);
  // One abort per partition: no single partition crosses the threshold.
  for (int i = 0; i < 11; i++) {
    long txnid = openTxn();
    LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.TABLE, "default");
    comp.setTablename("ncomdpa");
    comp.setPartitionname("ds=day-" + i);
    List<LockComponent> components = new ArrayList<>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    req.setTxnid(txnid);
    txnHandler.lock(req);
    txnHandler.abortTxn(new AbortTxnRequest(txnid));
  }
  startInitiator();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  Assert.assertEquals(0, rsp.getCompactsSize());
}
BranchVerifier UtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * A compaction claimed by a dead worker on THIS host is recovered to "initiated",
 * while one claimed by a worker on another host is left in "working" state.
 */
@Test public void recoverFailedLocalWorkers() throws Exception {
  newTable("default", "rflw1", false);
  CompactionRequest rqst = new CompactionRequest("default", "rflw1", CompactionType.MINOR);
  txnHandler.compact(rqst);
  newTable("default", "rflw2", false);
  rqst = new CompactionRequest("default", "rflw2", CompactionType.MINOR);
  txnHandler.compact(rqst);
  // One claim from this host (recoverable), one from an unknown host (kept).
  txnHandler.findNextToCompact(Worker.hostname() + "-193892");
  txnHandler.findNextToCompact("nosuchhost-193892");
  startInitiator();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(2, compacts.size());
  boolean sawInitiated = false;
  for (ShowCompactResponseElement c : compacts) {
    if (c.getState().equals("working")) {
      Assert.assertEquals("nosuchhost-193892", c.getWorkerid());
    } else if (c.getState().equals("initiated")) {
      sawInitiated = true;
    } else {
      Assert.fail("Unexpected state");
    }
  }
  Assert.assertTrue(sawInitiated);
}
EqualityVerifier
/**
 * A table with NO_AUTO_COMPACTION=true must never be auto-compacted, even past the
 * aborted-transaction threshold.
 */
@Test public void noCompactWhenNoCompactSet() throws Exception {
  Map<String, String> parameters = new HashMap<>(1);
  parameters.put("NO_AUTO_COMPACTION", "true");
  newTable("default", "ncwncs", false, parameters);
  HiveConf.setIntVar(conf, HiveConf.ConfVars.HIVE_COMPACTOR_ABORTEDTXN_THRESHOLD, 10);
  for (int i = 0; i < 11; i++) {
    long txnid = openTxn();
    LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.TABLE, "default");
    comp.setTablename("ncwncs");
    List<LockComponent> components = new ArrayList<>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    req.setTxnid(txnid);
    txnHandler.lock(req);
    txnHandler.abortTxn(new AbortTxnRequest(txnid));
  }
  startInitiator();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  Assert.assertEquals(0, rsp.getCompactsSize());
}
InternalCallVerifier EqualityVerifier
/**
 * Two committed transactions against the same partition must result in a single
 * compaction request, not one per transaction.
 */
@Test public void twoTxnsOnSamePartitionGenerateOneCompactionRequest() throws Exception {
  Table t = newTable("default", "ttospgocr", true);
  Partition p = newPartition(t, "today");
  addBaseFile(t, p, 20L, 20);
  addDeltaFile(t, p, 21L, 22L, 2);
  addDeltaFile(t, p, 23L, 24L, 2);
  burnThroughTransactions(23);
  // First committed transaction on the partition.
  long txnid = openTxn();
  LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.PARTITION, "default");
  comp.setTablename("ttospgocr");
  comp.setPartitionname("ds=today");
  List<LockComponent> components = new ArrayList<>(1);
  components.add(comp);
  LockRequest req = new LockRequest(components, "me", "localhost");
  req.setTxnid(txnid);
  txnHandler.lock(req);
  txnHandler.commitTxn(new CommitTxnRequest(txnid));
  // Second committed transaction on the same partition.
  txnid = openTxn();
  comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.PARTITION, "default");
  comp.setTablename("ttospgocr");
  comp.setPartitionname("ds=today");
  components = new ArrayList<>(1);
  components.add(comp);
  req = new LockRequest(components, "me", "localhost");
  req.setTxnid(txnid);
  txnHandler.lock(req);
  txnHandler.commitTxn(new CommitTxnRequest(txnid));
  startInitiator();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(1, compacts.size());
  Assert.assertEquals("initiated", compacts.get(0).getState());
  Assert.assertEquals("ttospgocr", compacts.get(0).getTablename());
  Assert.assertEquals("ds=today", compacts.get(0).getPartitionname());
  Assert.assertEquals(CompactionType.MAJOR, compacts.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/**
 * A partition with more deltas than the delta-count threshold (but low total delta
 * size vs the base) should get a MINOR compaction scheduled.
 */
@Test public void compactPartitionTooManyDeltas() throws Exception {
  Table t = newTable("default", "cptmd", true);
  Partition p = newPartition(t, "today");
  addBaseFile(t, p, 200L, 200);
  // Eleven tiny deltas: over the count threshold, under the size-pct threshold.
  addDeltaFile(t, p, 201L, 201L, 1);
  addDeltaFile(t, p, 202L, 202L, 1);
  addDeltaFile(t, p, 203L, 203L, 1);
  addDeltaFile(t, p, 204L, 204L, 1);
  addDeltaFile(t, p, 205L, 205L, 1);
  addDeltaFile(t, p, 206L, 206L, 1);
  addDeltaFile(t, p, 207L, 207L, 1);
  addDeltaFile(t, p, 208L, 208L, 1);
  addDeltaFile(t, p, 209L, 209L, 1);
  addDeltaFile(t, p, 210L, 210L, 1);
  addDeltaFile(t, p, 211L, 211L, 1);
  burnThroughTransactions(210);
  long txnid = openTxn();
  LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.PARTITION, "default");
  comp.setTablename("cptmd");
  comp.setPartitionname("ds=today");
  List<LockComponent> components = new ArrayList<>(1);
  components.add(comp);
  LockRequest req = new LockRequest(components, "me", "localhost");
  req.setTxnid(txnid);
  txnHandler.lock(req);
  txnHandler.commitTxn(new CommitTxnRequest(txnid));
  startInitiator();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(1, compacts.size());
  Assert.assertEquals("initiated", compacts.get(0).getState());
  Assert.assertEquals("cptmd", compacts.get(0).getTablename());
  Assert.assertEquals("ds=today", compacts.get(0).getPartitionname());
  Assert.assertEquals(CompactionType.MINOR, compacts.get(0).getType());
}
EqualityVerifier
/**
 * Same as noCompactWhenNoCompactSet, but the table property key is lower-case —
 * the check must be case-insensitive.
 */
@Test public void noCompactWhenNoCompactSetLowerCase() throws Exception {
  Map<String, String> parameters = new HashMap<>(1);
  parameters.put("no_auto_compaction", "true");
  newTable("default", "ncwncs", false, parameters);
  HiveConf.setIntVar(conf, HiveConf.ConfVars.HIVE_COMPACTOR_ABORTEDTXN_THRESHOLD, 10);
  for (int i = 0; i < 11; i++) {
    long txnid = openTxn();
    LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.TABLE, "default");
    comp.setTablename("ncwncs");
    List<LockComponent> components = new ArrayList<>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    req.setTxnid(txnid);
    txnHandler.lock(req);
    txnHandler.abortTxn(new AbortTxnRequest(txnid));
  }
  startInitiator();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  Assert.assertEquals(0, rsp.getCompactsSize());
}
InternalCallVerifier EqualityVerifier
/**
 * Exceeding the aborted-transaction threshold on an unpartitioned table should
 * trigger a MAJOR compaction.
 */
@Test public void majorCompactOnTableTooManyAborts() throws Exception {
  newTable("default", "mcottma", false);
  HiveConf.setIntVar(conf, HiveConf.ConfVars.HIVE_COMPACTOR_ABORTEDTXN_THRESHOLD, 10);
  // Eleven aborts on the same table: one over the threshold.
  for (int i = 0; i < 11; i++) {
    long txnid = openTxn();
    LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.TABLE, "default");
    comp.setTablename("mcottma");
    List<LockComponent> components = new ArrayList<>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    req.setTxnid(txnid);
    txnHandler.lock(req);
    txnHandler.abortTxn(new AbortTxnRequest(txnid));
  }
  startInitiator();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(1, compacts.size());
  Assert.assertEquals("initiated", compacts.get(0).getState());
  Assert.assertEquals("mcottma", compacts.get(0).getTablename());
  Assert.assertEquals(CompactionType.MAJOR, compacts.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/**
 * Delta data large relative to the table's base file should trigger a MAJOR
 * compaction.
 */
@Test public void compactTableHighDeltaPct() throws Exception {
  Table t = newTable("default", "cthdp", false);
  // 4 records of delta against a 20-record base exceeds the delta-pct threshold.
  addBaseFile(t, null, 20L, 20);
  addDeltaFile(t, null, 21L, 22L, 2);
  addDeltaFile(t, null, 23L, 24L, 2);
  burnThroughTransactions(23);
  long txnid = openTxn();
  LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.TABLE, "default");
  comp.setTablename("cthdp");
  List<LockComponent> components = new ArrayList<>(1);
  components.add(comp);
  LockRequest req = new LockRequest(components, "me", "localhost");
  req.setTxnid(txnid);
  txnHandler.lock(req);
  txnHandler.commitTxn(new CommitTxnRequest(txnid));
  startInitiator();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(1, compacts.size());
  Assert.assertEquals("initiated", compacts.get(0).getState());
  Assert.assertEquals("cthdp", compacts.get(0).getTablename());
  Assert.assertEquals(CompactionType.MAJOR, compacts.get(0).getType());
}
InternalCallVerifier EqualityVerifier
/**
 * Exceeding the aborted-transaction threshold on a single partition should trigger a
 * MAJOR compaction for that partition.
 */
@Test public void majorCompactOnPartitionTooManyAborts() throws Exception {
  Table t = newTable("default", "mcoptma", true);
  newPartition(t, "today");
  HiveConf.setIntVar(conf, HiveConf.ConfVars.HIVE_COMPACTOR_ABORTEDTXN_THRESHOLD, 10);
  // Eleven aborts on the same partition: one over the threshold.
  for (int i = 0; i < 11; i++) {
    long txnid = openTxn();
    LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.TABLE, "default");
    comp.setTablename("mcoptma");
    comp.setPartitionname("ds=today");
    List<LockComponent> components = new ArrayList<>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    req.setTxnid(txnid);
    txnHandler.lock(req);
    txnHandler.abortTxn(new AbortTxnRequest(txnid));
  }
  startInitiator();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(1, compacts.size());
  Assert.assertEquals("initiated", compacts.get(0).getState());
  Assert.assertEquals("mcoptma", compacts.get(0).getTablename());
  Assert.assertEquals("ds=today", compacts.get(0).getPartitionname());
  Assert.assertEquals(CompactionType.MAJOR, compacts.get(0).getType());
}
Class: org.apache.hadoop.hive.ql.txn.compactor.TestWorker BranchVerifier UtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Round-trips CompactorMR.StringableList through its string encoding: empty list,
 * then a two-path list (element order in the encoding is not guaranteed).
 */
@Test public void stringableList() throws Exception {
  // Empty list encodes as "0:" and decodes back to size 0.
  CompactorMR.StringableList paths = new CompactorMR.StringableList();
  String encoded = paths.toString();
  Assert.assertEquals("0:", encoded);
  paths = new CompactorMR.StringableList(encoded);
  Assert.assertEquals(0, paths.size());
  // Two paths round-trip; the serialized order is unspecified.
  paths = new CompactorMR.StringableList();
  paths.add(new Path("/tmp"));
  paths.add(new Path("/usr"));
  encoded = paths.toString();
  boolean validEncoding =
      "2:4:/tmp4:/usr".equals(encoded) || "2:4:/usr4:/tmp".equals(encoded);
  Assert.assertTrue("Expected 2:4:/tmp4:/usr or 2:4:/usr4:/tmp, got " + encoded, validEncoding);
  paths = new CompactorMR.StringableList(encoded);
  Assert.assertEquals(2, paths.size());
  boolean foundTmp = false;
  boolean foundUsr = false;
  for (Path decoded : paths) {
    String asString = decoded.toString();
    if ("/tmp".equals(asString)) {
      foundTmp = true;
    } else if ("/usr".equals(asString)) {
      foundUsr = true;
    } else {
      Assert.fail("Unexpected path " + decoded.toString());
    }
  }
  Assert.assertTrue(foundTmp);
  Assert.assertTrue(foundUsr);
}
IterativeVerifier BranchVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * MINOR compaction of a table with a base and two deltas must produce a new compacted
 * delta (delta_21_24) containing both buckets, alongside the original directories.
 */
@Test public void minorTableWithBase() throws Exception {
  LOG.debug("Starting minorTableWithBase");
  Table t = newTable("default", "mtwb", false);
  addBaseFile(t, null, 20L, 20);
  addDeltaFile(t, null, 21L, 22L, 2);
  addDeltaFile(t, null, 23L, 24L, 2);
  burnThroughTransactions(25);
  CompactionRequest rqst = new CompactionRequest("default", "mtwb", CompactionType.MINOR);
  txnHandler.compact(rqst);
  startWorker();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(1, compacts.size());
  Assert.assertEquals("ready for cleaning", compacts.get(0).getState());
  FileSystem fs = FileSystem.get(conf);
  FileStatus[] stat = fs.listStatus(new Path(t.getSd().getLocation()));
  // base + 2 original deltas + the new compacted delta.
  Assert.assertEquals(4, stat.length);
  boolean sawNewDelta = false;
  for (FileStatus dir : stat) {
    if (dir.getPath().getName().equals(makeDeltaDirNameCompacted(21, 24))) {
      sawNewDelta = true;
      FileStatus[] buckets = fs.listStatus(dir.getPath());
      Assert.assertEquals(2, buckets.length);
      Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
      Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
      Assert.assertEquals(208L, buckets[0].getLen());
      Assert.assertEquals(208L, buckets[1].getLen());
    } else {
      LOG.debug("This is not the delta file you are looking for " + dir.getPath().getName());
    }
  }
  Assert.assertTrue(sawNewDelta);
}
InternalCallVerifier EqualityVerifier
/**
 * MINOR compaction with aborted transactions (24, 25) in the middle of a delta must
 * produce a compacted delta spanning 21-27 while leaving the originals in place.
 */
@Test public void minorWithAborted() throws Exception {
  LOG.debug("Starting minorWithAborted");
  Table t = newTable("default", "mtwb", false);
  addBaseFile(t, null, 20L, 20);
  addDeltaFile(t, null, 21L, 22L, 2);
  addDeltaFile(t, null, 23L, 25L, 3);
  addLengthFile(t, null, 23L, 25L, 3);
  addDeltaFile(t, null, 26L, 27L, 2);
  // Txns 24 and 25 are aborted; the compactor must skip their rows.
  burnThroughTransactions(27, null, new HashSet<>(Arrays.asList(24L, 25L)));
  CompactionRequest rqst = new CompactionRequest("default", "mtwb", CompactionType.MINOR);
  txnHandler.compact(rqst);
  startWorker();
  ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
  List<ShowCompactResponseElement> compacts = rsp.getCompacts();
  Assert.assertEquals(1, compacts.size());
  Assert.assertEquals("ready for cleaning", compacts.get(0).getState());
  FileSystem fs = FileSystem.get(conf);
  FileStatus[] stat = fs.listStatus(new Path(t.getSd().getLocation()));
  Assert.assertEquals(5, stat.length);
  // Sort so directory names can be asserted positionally.
  Arrays.sort(stat);
  Assert.assertEquals("base_20", stat[0].getPath().getName());
  Assert.assertEquals(makeDeltaDirName(21, 22), stat[1].getPath().getName());
  Assert.assertEquals(makeDeltaDirNameCompacted(21, 27), stat[2].getPath().getName());
  Assert.assertEquals(makeDeltaDirName(23, 25), stat[3].getPath().getName());
  Assert.assertEquals(makeDeltaDirName(26, 27), stat[4].getPath().getName());
}
APIUtilityVerifier IterativeVerifier BranchVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Major compaction of a single partition that already has a base file.
 * Expects a new base_0000024 directory containing two buckets of 624 bytes
 * each, alongside the three original directories (cleaner has not run yet).
 */
@Test public void majorPartitionWithBase() throws Exception {
LOG.debug("Starting majorPartitionWithBase");
Table t=newTable("default","mapwb",true);
Partition p=newPartition(t,"today");
addBaseFile(t,p,20L,20);
addDeltaFile(t,p,21L,22L,2);
addDeltaFile(t,p,23L,24L,2);
burnThroughTransactions(25);
CompactionRequest rqst=new CompactionRequest("default","mapwb",CompactionType.MAJOR);
rqst.setPartitionname("ds=today");
txnHandler.compact(rqst);
startWorker();
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List compacts=rsp.getCompacts();
Assert.assertEquals(1,compacts.size());
Assert.assertEquals("ready for cleaning",compacts.get(0).getState());
FileSystem fs=FileSystem.get(conf);
FileStatus[] stat=fs.listStatus(new Path(p.getSd().getLocation()));
// 3 originals + the new base.
Assert.assertEquals(4,stat.length);
boolean sawNewBase=false;
for (int i=0; i < stat.length; i++) {
if (stat[i].getPath().getName().equals("base_0000024")) {
sawNewBase=true;
FileStatus[] buckets=fs.listStatus(stat[i].getPath());
Assert.assertEquals(2,buckets.length);
Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
Assert.assertEquals(624L,buckets[0].getLen());
Assert.assertEquals(624L,buckets[1].getLen());
}
else {
LOG.debug("This is not the file you are looking for " + stat[i].getPath().getName());
}
}
Assert.assertTrue(sawNewBase);
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Requests a major compaction, then drops the table before the worker runs.
 * The worker should notice the table is gone and mark the request as
 * succeeded instead of failing.
 */
@Test public void droppedTable() throws Exception {
Table t=newTable("default","dt",false);
addDeltaFile(t,null,1L,2L,2);
addDeltaFile(t,null,3L,4L,2);
burnThroughTransactions(4);
CompactionRequest rqst=new CompactionRequest("default","dt",CompactionType.MAJOR);
txnHandler.compact(rqst);
// Drop the table after the request is queued but before the worker starts.
ms.dropTable("default","dt");
startWorker();
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List compacts=rsp.getCompacts();
Assert.assertEquals(1,compacts.size());
// assertEquals instead of assertTrue(a.equals(b)) so a failure reports both values.
Assert.assertEquals(TxnStore.SUCCEEDED_RESPONSE,compacts.get(0).getState());
}
InternalCallVerifier EqualityVerifier
/**
 * Major compaction when transaction 23 (second arg to burnThroughTransactions,
 * presumably the open-txn set) is still open in the middle of the range.
 * Compaction can only cover committed txns below the open one, so the new
 * base is base_0000022; all original directories are still present.
 */
@Test public void majorWithOpenInMiddle() throws Exception {
LOG.debug("Starting majorWithOpenInMiddle");
Table t=newTable("default","mtwb",false);
addBaseFile(t,null,20L,20);
addDeltaFile(t,null,21L,22L,2);
addDeltaFile(t,null,23L,25L,3);
addLengthFile(t,null,23L,25L,3);
addDeltaFile(t,null,26L,27L,2);
burnThroughTransactions(27,new HashSet(Arrays.asList(23L)),null);
CompactionRequest rqst=new CompactionRequest("default","mtwb",CompactionType.MAJOR);
txnHandler.compact(rqst);
startWorker();
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List compacts=rsp.getCompacts();
Assert.assertEquals(1,compacts.size());
Assert.assertEquals("ready for cleaning",compacts.get(0).getState());
FileSystem fs=FileSystem.get(conf);
FileStatus[] stat=fs.listStatus(new Path(t.getSd().getLocation()));
Assert.assertEquals(5,stat.length);
Arrays.sort(stat);
// New base only reaches 22 because txn 23 is open.
Assert.assertEquals("base_0000022",stat[0].getPath().getName());
Assert.assertEquals("base_20",stat[1].getPath().getName());
Assert.assertEquals(makeDeltaDirName(21,22),stat[2].getPath().getName());
Assert.assertEquals(makeDeltaDirName(23,25),stat[3].getPath().getName());
Assert.assertEquals(makeDeltaDirName(26,27),stat[4].getPath().getName());
}
APIUtilityVerifier IterativeVerifier BranchVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Minor compaction of a partition that has a base file. Expects a compacted
 * delta covering txns 21-24 with two 208-byte buckets; the three original
 * directories remain until cleaning.
 */
@Test public void minorPartitionWithBase() throws Exception {
Table t=newTable("default","mpwb",true);
Partition p=newPartition(t,"today");
addBaseFile(t,p,20L,20);
addDeltaFile(t,p,21L,22L,2);
addDeltaFile(t,p,23L,24L,2);
burnThroughTransactions(25);
CompactionRequest rqst=new CompactionRequest("default","mpwb",CompactionType.MINOR);
rqst.setPartitionname("ds=today");
txnHandler.compact(rqst);
startWorker();
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List compacts=rsp.getCompacts();
Assert.assertEquals(1,compacts.size());
Assert.assertEquals("ready for cleaning",compacts.get(0).getState());
FileSystem fs=FileSystem.get(conf);
FileStatus[] stat=fs.listStatus(new Path(p.getSd().getLocation()));
Assert.assertEquals(4,stat.length);
boolean sawNewDelta=false;
for (int i=0; i < stat.length; i++) {
if (stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(21,24))) {
sawNewDelta=true;
FileStatus[] buckets=fs.listStatus(stat[i].getPath());
Assert.assertEquals(2,buckets.length);
Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
Assert.assertEquals(208L,buckets[0].getLen());
Assert.assertEquals(208L,buckets[1].getLen());
}
else {
LOG.debug("This is not the delta file you are looking for " + stat[i].getPath().getName());
}
}
Assert.assertTrue(sawNewDelta);
}
IterativeVerifier BranchVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Minor compaction of a table that still has pre-ACID ("legacy") files at its
 * root instead of a base directory. Only checks the compacted delta and its
 * bucket names — no total-count assertion, since legacy layout varies.
 */
@Test public void minorTableLegacy() throws Exception {
LOG.debug("Starting minorTableLegacy");
Table t=newTable("default","mtl",false);
addLegacyFile(t,null,20);
addDeltaFile(t,null,21L,22L,2);
addDeltaFile(t,null,23L,24L,2);
burnThroughTransactions(25);
CompactionRequest rqst=new CompactionRequest("default","mtl",CompactionType.MINOR);
txnHandler.compact(rqst);
startWorker();
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List compacts=rsp.getCompacts();
Assert.assertEquals(1,compacts.size());
Assert.assertEquals("ready for cleaning",compacts.get(0).getState());
FileSystem fs=FileSystem.get(conf);
FileStatus[] stat=fs.listStatus(new Path(t.getSd().getLocation()));
boolean sawNewDelta=false;
for (int i=0; i < stat.length; i++) {
if (stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(21,24))) {
sawNewDelta=true;
FileStatus[] buckets=fs.listStatus(stat[i].getPath());
Assert.assertEquals(2,buckets.length);
Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
}
else {
LOG.debug("This is not the file you are looking for " + stat[i].getPath().getName());
}
}
Assert.assertTrue(sawNewDelta);
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Serialization round trip of CompactorInputSplit when there is no base
 * directory: write() then readFields() must preserve bucket number, the
 * null base dir, and both delta dirs.
 * NOTE(review): the temp file is created but never deleted, and the same
 * name is reused by inputSplit() — assumes tests tolerate leftovers; confirm.
 */
@Test public void inputSplitNullBase() throws Exception {
String delta1="/warehouse/foo/delta_2_3";
String delta2="/warehouse/foo/delta_4_7";
HiveConf conf=new HiveConf();
Path file=new Path(System.getProperty("java.io.tmpdir") + System.getProperty("file.separator") + "newWriteInputSplitTest");
FileSystem fs=FileSystem.get(conf);
FSDataOutputStream os=fs.create(file);
for (int i=0; i < 10; i++) {
os.writeBytes("mary had a little lamb its fleece was white as snow\n");
}
os.close();
List files=new ArrayList(1);
files.add(file);
Path[] deltas=new Path[2];
deltas[0]=new Path(delta1);
deltas[1]=new Path(delta2);
// null base dir is the case under test.
CompactorMR.CompactorInputSplit split=new CompactorMR.CompactorInputSplit(conf,3,files,null,deltas);
ByteArrayOutputStream buf=new ByteArrayOutputStream();
DataOutput out=new DataOutputStream(buf);
split.write(out);
split=new CompactorMR.CompactorInputSplit();
DataInput in=new DataInputStream(new ByteArrayInputStream(buf.toByteArray()));
split.readFields(in);
Assert.assertEquals(3,split.getBucket());
Assert.assertNull(split.getBaseDir());
deltas=split.getDeltaDirs();
Assert.assertEquals(2,deltas.length);
Assert.assertEquals(delta1,deltas[0].toString());
Assert.assertEquals(delta2,deltas[1].toString());
}
InternalCallVerifier EqualityVerifier
/**
 * Minor compaction when transaction 23 is still open in the middle of the
 * range: only delta_21_22 can be compacted (into the compacted 21-22 dir);
 * the later deltas stay as-is. The pre-existing delta_21_22 is expected to
 * be replaced by its compacted form (4 dirs total, not 5).
 * todo: fix https://issues.apache.org/jira/browse/HIVE-9995
 * @throws Exception
 */
@Test public void minorWithOpenInMiddle() throws Exception {
LOG.debug("Starting minorWithOpenInMiddle");
Table t=newTable("default","mtwb",false);
addBaseFile(t,null,20L,20);
addDeltaFile(t,null,21L,22L,2);
addDeltaFile(t,null,23L,25L,3);
addLengthFile(t,null,23L,25L,3);
addDeltaFile(t,null,26L,27L,2);
burnThroughTransactions(27,new HashSet(Arrays.asList(23L)),null);
CompactionRequest rqst=new CompactionRequest("default","mtwb",CompactionType.MINOR);
txnHandler.compact(rqst);
startWorker();
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List compacts=rsp.getCompacts();
Assert.assertEquals(1,compacts.size());
Assert.assertEquals("ready for cleaning",compacts.get(0).getState());
FileSystem fs=FileSystem.get(conf);
FileStatus[] stat=fs.listStatus(new Path(t.getSd().getLocation()));
Assert.assertEquals(4,stat.length);
Arrays.sort(stat);
Assert.assertEquals("base_20",stat[0].getPath().getName());
Assert.assertEquals(makeDeltaDirNameCompacted(21,22),stat[1].getPath().getName());
Assert.assertEquals(makeDeltaDirName(23,25),stat[2].getPath().getName());
Assert.assertEquals(makeDeltaDirName(26,27),stat[3].getPath().getName());
}
IterativeVerifier BranchVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Minor compaction of a table that has deltas but no base file. Expects a
 * compacted delta covering txns 1-4 with two 208-byte buckets; the two
 * original deltas remain until the cleaner runs.
 */
@Test public void minorTableNoBase() throws Exception {
// Fixed copy-paste: this previously logged "Starting minorTableWithBase".
LOG.debug("Starting minorTableNoBase");
Table t=newTable("default","mtnb",false);
addDeltaFile(t,null,1L,2L,2);
addDeltaFile(t,null,3L,4L,2);
burnThroughTransactions(5);
CompactionRequest rqst=new CompactionRequest("default","mtnb",CompactionType.MINOR);
txnHandler.compact(rqst);
startWorker();
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List compacts=rsp.getCompacts();
Assert.assertEquals(1,compacts.size());
Assert.assertEquals("ready for cleaning",compacts.get(0).getState());
FileSystem fs=FileSystem.get(conf);
FileStatus[] stat=fs.listStatus(new Path(t.getSd().getLocation()));
// 2 original deltas + the new compacted delta.
Assert.assertEquals(3,stat.length);
boolean sawNewDelta=false;
for (int i=0; i < stat.length; i++) {
if (stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(1,4))) {
sawNewDelta=true;
FileStatus[] buckets=fs.listStatus(stat[i].getPath());
Assert.assertEquals(2,buckets.length);
Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
Assert.assertEquals(208L,buckets[0].getLen());
Assert.assertEquals(208L,buckets[1].getLen());
}
else {
LOG.debug("This is not the delta file you are looking for " + stat[i].getPath().getName());
}
}
Assert.assertTrue(sawNewDelta);
}
InternalCallVerifier EqualityVerifier
/**
 * Major compaction of a table where transactions 24-25 were aborted.
 * Unlike the open-txn case, aborted txns do not block compaction, so the
 * new base covers the full range through txn 27 (base_0000027).
 */
@Test public void majorWithAborted() throws Exception {
LOG.debug("Starting majorWithAborted");
Table t=newTable("default","mtwb",false);
addBaseFile(t,null,20L,20);
addDeltaFile(t,null,21L,22L,2);
addDeltaFile(t,null,23L,25L,3);
addLengthFile(t,null,23L,25L,3);
addDeltaFile(t,null,26L,27L,2);
burnThroughTransactions(27,null,new HashSet(Arrays.asList(24L,25L)));
CompactionRequest rqst=new CompactionRequest("default","mtwb",CompactionType.MAJOR);
txnHandler.compact(rqst);
startWorker();
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List compacts=rsp.getCompacts();
Assert.assertEquals(1,compacts.size());
Assert.assertEquals("ready for cleaning",compacts.get(0).getState());
FileSystem fs=FileSystem.get(conf);
FileStatus[] stat=fs.listStatus(new Path(t.getSd().getLocation()));
Assert.assertEquals(5,stat.length);
Arrays.sort(stat);
Assert.assertEquals("base_0000027",stat[0].getPath().getName());
Assert.assertEquals("base_20",stat[1].getPath().getName());
Assert.assertEquals(makeDeltaDirName(21,22),stat[2].getPath().getName());
Assert.assertEquals(makeDeltaDirName(23,25),stat[3].getPath().getName());
Assert.assertEquals(makeDeltaDirName(26,27),stat[4].getPath().getName());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Minor compaction request on a sorted partition. Only asserts the directory
 * count is unchanged (4) — i.e. no compacted delta was produced — which
 * suggests the worker skips sorted partitions; confirm against Worker logic.
 */
@Test public void sortedPartition() throws Exception {
List sortCols=new ArrayList(1);
sortCols.add(new Order("b",1));
Table t=newTable("default","sp",true,new HashMap(),sortCols,false);
Partition p=newPartition(t,"today",sortCols);
addBaseFile(t,p,20L,20);
addDeltaFile(t,p,21L,22L,2);
addDeltaFile(t,p,23L,24L,2);
addDeltaFile(t,p,21L,24L,4);
burnThroughTransactions(25);
CompactionRequest rqst=new CompactionRequest("default","sp",CompactionType.MINOR);
rqst.setPartitionname("ds=today");
txnHandler.compact(rqst);
startWorker();
FileSystem fs=FileSystem.get(conf);
FileStatus[] stat=fs.listStatus(new Path(p.getSd().getLocation()));
Assert.assertEquals(4,stat.length);
}
APIUtilityVerifier IterativeVerifier BranchVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Major compaction when the base and one delta are missing some bucket files
 * (the extra boolean args to addBaseFile/addDeltaFile — presumably "all
 * buckets present"; confirm against the helpers). The resulting base_0000026
 * must still contain both buckets, with asymmetric sizes (104/676 bytes)
 * reflecting the uneven input, in either listing order.
 */
@Test public void majorPartitionWithBaseMissingBuckets() throws Exception {
LOG.debug("Starting majorPartitionWithBaseMissingBuckets");
Table t=newTable("default","mapwbmb",true);
Partition p=newPartition(t,"today");
addBaseFile(t,p,20L,20,2,false);
addDeltaFile(t,p,21L,22L,2,2,false);
addDeltaFile(t,p,23L,26L,4);
burnThroughTransactions(27);
CompactionRequest rqst=new CompactionRequest("default","mapwbmb",CompactionType.MAJOR);
rqst.setPartitionname("ds=today");
txnHandler.compact(rqst);
startWorker();
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List compacts=rsp.getCompacts();
Assert.assertEquals(1,compacts.size());
Assert.assertEquals("ready for cleaning",compacts.get(0).getState());
FileSystem fs=FileSystem.get(conf);
FileStatus[] stat=fs.listStatus(new Path(p.getSd().getLocation()));
Assert.assertEquals(4,stat.length);
boolean sawNewBase=false;
for (int i=0; i < stat.length; i++) {
if (stat[i].getPath().getName().equals("base_0000026")) {
sawNewBase=true;
FileStatus[] buckets=fs.listStatus(stat[i].getPath());
Assert.assertEquals(2,buckets.length);
Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
// Accept either listing order for (bucket_00000, 104 bytes) and (bucket_00001, 676 bytes).
Assert.assertTrue(("bucket_00000".equals(buckets[0].getPath().getName()) && 104L == buckets[0].getLen() && "bucket_00001".equals(buckets[1].getPath().getName()) && 676L == buckets[1].getLen()) || ("bucket_00000".equals(buckets[1].getPath().getName()) && 104L == buckets[1].getLen() && "bucket_00001".equals(buckets[0].getPath().getName()) && 676L == buckets[0].getLen()));
}
else {
LOG.debug("This is not the file you are looking for " + stat[i].getPath().getName());
}
}
Assert.assertTrue(sawNewBase);
}
IterativeVerifier BranchVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Major compaction of an unpartitioned table that has a base file.
 * Expects a new base_0000024 with two 624-byte buckets next to the three
 * original directories.
 */
@Test public void majorTableWithBase() throws Exception {
LOG.debug("Starting majorTableWithBase");
Table t=newTable("default","matwb",false);
addBaseFile(t,null,20L,20);
addDeltaFile(t,null,21L,22L,2);
addDeltaFile(t,null,23L,24L,2);
burnThroughTransactions(25);
CompactionRequest rqst=new CompactionRequest("default","matwb",CompactionType.MAJOR);
txnHandler.compact(rqst);
startWorker();
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List compacts=rsp.getCompacts();
Assert.assertEquals(1,compacts.size());
Assert.assertEquals("ready for cleaning",compacts.get(0).getState());
FileSystem fs=FileSystem.get(conf);
FileStatus[] stat=fs.listStatus(new Path(t.getSd().getLocation()));
Assert.assertEquals(4,stat.length);
boolean sawNewBase=false;
for (int i=0; i < stat.length; i++) {
if (stat[i].getPath().getName().equals("base_0000024")) {
sawNewBase=true;
FileStatus[] buckets=fs.listStatus(stat[i].getPath());
Assert.assertEquals(2,buckets.length);
Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
Assert.assertEquals(624L,buckets[0].getLen());
Assert.assertEquals(624L,buckets[1].getLen());
}
else {
LOG.debug("This is not the file you are looking for " + stat[i].getPath().getName());
}
}
Assert.assertTrue(sawNewBase);
}
BranchVerifier UtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Round-trips a CompactorMR.StringableMap through its string form and back,
 * including a null key and a null value. Restores the generic type arguments
 * that were missing: with a raw Map, {@code saw.get("mary")} returns Object,
 * which does not satisfy {@code Assert.assertTrue(boolean)}.
 */
@Test public void stringableMap() throws Exception {
// An empty map serializes as "0:" (size prefix, no entries) and deserializes empty.
CompactorMR.StringableMap m=new CompactorMR.StringableMap(new HashMap<String, String>());
String s=m.toString();
Assert.assertEquals("0:",s);
m=new CompactorMR.StringableMap(s);
Assert.assertEquals(0,m.size());
// A null value and a null key must both survive the round trip.
Map<String, String> base=new HashMap<>();
base.put("mary","poppins");
base.put("bert",null);
base.put(null,"banks");
m=new CompactorMR.StringableMap(base);
s=m.toString();
m=new CompactorMR.StringableMap(s);
Assert.assertEquals(3,m.size());
// Track which keys we see while iterating the deserialized map.
Map<String, Boolean> saw=new HashMap<>(3);
saw.put("mary",false);
saw.put("bert",false);
saw.put(null,false);
for ( Map.Entry<String, String> e : m.entrySet()) {
saw.put(e.getKey(),true);
if ("mary".equals(e.getKey())) Assert.assertEquals("poppins",e.getValue());
else if ("bert".equals(e.getKey())) Assert.assertNull(e.getValue());
else if (null == e.getKey()) Assert.assertEquals("banks",e.getValue());
else Assert.fail("Unexpected value " + e.getKey());
}
Assert.assertEquals(3,saw.size());
Assert.assertTrue(saw.get("mary"));
Assert.assertTrue(saw.get("bert"));
Assert.assertTrue(saw.get(null));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Minor compaction request on a sorted (unpartitioned) table. Like
 * sortedPartition, only asserts the directory count is unchanged (4) —
 * no compacted delta produced; presumably the worker skips sorted tables.
 */
@Test public void sortedTable() throws Exception {
List sortCols=new ArrayList(1);
sortCols.add(new Order("b",1));
Table t=newTable("default","st",false,new HashMap(),sortCols,false);
addBaseFile(t,null,20L,20);
addDeltaFile(t,null,21L,22L,2);
addDeltaFile(t,null,23L,24L,2);
addDeltaFile(t,null,21L,24L,4);
burnThroughTransactions(25);
CompactionRequest rqst=new CompactionRequest("default","st",CompactionType.MINOR);
txnHandler.compact(rqst);
startWorker();
FileSystem fs=FileSystem.get(conf);
FileStatus[] stat=fs.listStatus(new Path(t.getSd().getLocation()));
Assert.assertEquals(4,stat.length);
}
IterativeVerifier BranchVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Major compaction of a table with pre-ACID ("legacy") files instead of a
 * base directory. Expects a new base_0000024 with two 624-byte buckets;
 * no total-count assertion since the legacy layout varies.
 */
@Test public void majorTableLegacy() throws Exception {
LOG.debug("Starting majorTableLegacy");
Table t=newTable("default","matl",false);
addLegacyFile(t,null,20);
addDeltaFile(t,null,21L,22L,2);
addDeltaFile(t,null,23L,24L,2);
burnThroughTransactions(25);
CompactionRequest rqst=new CompactionRequest("default","matl",CompactionType.MAJOR);
txnHandler.compact(rqst);
startWorker();
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List compacts=rsp.getCompacts();
Assert.assertEquals(1,compacts.size());
Assert.assertEquals("ready for cleaning",compacts.get(0).getState());
FileSystem fs=FileSystem.get(conf);
FileStatus[] stat=fs.listStatus(new Path(t.getSd().getLocation()));
boolean sawNewBase=false;
for (int i=0; i < stat.length; i++) {
if (stat[i].getPath().getName().equals("base_0000024")) {
sawNewBase=true;
FileStatus[] buckets=fs.listStatus(stat[i].getPath());
Assert.assertEquals(2,buckets.length);
Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
Assert.assertEquals(624L,buckets[0].getLen());
Assert.assertEquals(624L,buckets[1].getLen());
}
else {
LOG.debug("This is not the file you are looking for " + stat[i].getPath().getName());
}
}
Assert.assertTrue(sawNewBase);
}
InternalCallVerifier EqualityVerifier
/**
 * CompactorInputSplit with a base dir: checks reported length (520 = 10
 * lines x 52 bytes), the "localhost" location, then a write()/readFields()
 * round trip preserving bucket, base dir, and delta dirs.
 * NOTE(review): reuses the same temp-file name as inputSplitNullBase and
 * never deletes it — assumed harmless; confirm.
 */
@Test public void inputSplit() throws Exception {
String basename="/warehouse/foo/base_1";
String delta1="/warehouse/foo/delta_2_3";
String delta2="/warehouse/foo/delta_4_7";
HiveConf conf=new HiveConf();
Path file=new Path(System.getProperty("java.io.tmpdir") + System.getProperty("file.separator") + "newWriteInputSplitTest");
FileSystem fs=FileSystem.get(conf);
FSDataOutputStream os=fs.create(file);
for (int i=0; i < 10; i++) {
os.writeBytes("mary had a little lamb its fleece was white as snow\n");
}
os.close();
List files=new ArrayList(1);
files.add(file);
Path[] deltas=new Path[2];
deltas[0]=new Path(delta1);
deltas[1]=new Path(delta2);
CompactorMR.CompactorInputSplit split=new CompactorMR.CompactorInputSplit(conf,3,files,new Path(basename),deltas);
Assert.assertEquals(520L,split.getLength());
String[] locations=split.getLocations();
Assert.assertEquals(1,locations.length);
Assert.assertEquals("localhost",locations[0]);
ByteArrayOutputStream buf=new ByteArrayOutputStream();
DataOutput out=new DataOutputStream(buf);
split.write(out);
split=new CompactorMR.CompactorInputSplit();
DataInput in=new DataInputStream(new ByteArrayInputStream(buf.toByteArray()));
split.readFields(in);
Assert.assertEquals(3,split.getBucket());
Assert.assertEquals(basename,split.getBaseDir().toString());
deltas=split.getDeltaDirs();
Assert.assertEquals(2,deltas.length);
Assert.assertEquals(delta1,deltas[0].toString());
Assert.assertEquals(delta2,deltas[1].toString());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Requests a minor compaction on a partition, then drops the partition
 * before the worker runs. The worker should notice it is gone and mark
 * the request as succeeded instead of failing.
 */
@Test public void droppedPartition() throws Exception {
Table t=newTable("default","dp",true);
Partition p=newPartition(t,"today");
addBaseFile(t,p,20L,20);
addDeltaFile(t,p,21L,22L,2);
addDeltaFile(t,p,23L,24L,2);
burnThroughTransactions(25);
CompactionRequest rqst=new CompactionRequest("default","dp",CompactionType.MINOR);
rqst.setPartitionname("ds=today");
txnHandler.compact(rqst);
// Drop the partition after the request is queued but before the worker starts.
ms.dropPartition("default","dp",Collections.singletonList("today"),true);
startWorker();
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List compacts=rsp.getCompacts();
Assert.assertEquals(1,compacts.size());
// assertEquals on the already-fetched list (matches droppedTable) so a
// failure reports both values instead of just "expected true".
Assert.assertEquals(TxnStore.SUCCEEDED_RESPONSE,compacts.get(0).getState());
}
IterativeVerifier BranchVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Major compaction of a table with deltas but no base file. Expects a new
 * base_0000004 with two 104-byte buckets next to the two original deltas.
 */
@Test public void majorTableNoBase() throws Exception {
LOG.debug("Starting majorTableNoBase");
Table t=newTable("default","matnb",false);
addDeltaFile(t,null,1L,2L,2);
addDeltaFile(t,null,3L,4L,2);
burnThroughTransactions(4);
CompactionRequest rqst=new CompactionRequest("default","matnb",CompactionType.MAJOR);
txnHandler.compact(rqst);
startWorker();
ShowCompactResponse rsp=txnHandler.showCompact(new ShowCompactRequest());
List compacts=rsp.getCompacts();
Assert.assertEquals(1,compacts.size());
Assert.assertEquals("ready for cleaning",compacts.get(0).getState());
FileSystem fs=FileSystem.get(conf);
FileStatus[] stat=fs.listStatus(new Path(t.getSd().getLocation()));
Assert.assertEquals(3,stat.length);
boolean sawNewBase=false;
for (int i=0; i < stat.length; i++) {
if (stat[i].getPath().getName().equals("base_0000004")) {
sawNewBase=true;
FileStatus[] buckets=fs.listStatus(stat[i].getPath());
Assert.assertEquals(2,buckets.length);
Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
Assert.assertEquals(104L,buckets[0].getLen());
Assert.assertEquals(104L,buckets[1].getLen());
}
else {
LOG.debug("This is not the file you are looking for " + stat[i].getPath().getName());
}
}
Assert.assertTrue(sawNewBase);
}
Class: org.apache.hadoop.hive.ql.udf.TestBlockedUdf EqualityVerifier
/**
 * Verify that with the default (empty) whitelist and blacklist settings,
 * a builtin UDF such as substr remains accessible.
 * @throws Exception
 */
@Test public void testDefaultWhiteList() throws Exception {
assertEquals("",new HiveConf().getVar(ConfVars.HIVE_SERVER2_BUILTIN_UDF_WHITELIST));
assertEquals("",new HiveConf().getVar(ConfVars.HIVE_SERVER2_BUILTIN_UDF_BLACKLIST));
FunctionRegistry.setupPermissionsForBuiltinUDFs("","");
assertEquals("substr",FunctionRegistry.getFunctionInfo("substr").getDisplayName());
}
EqualityVerifier
/**
 * Verify that a UDF in the whitelist can be accessed.
 * @throws Exception
 */
@Test public void testUdfInWhiteList() throws Exception {
Set funcNames=FunctionRegistry.getFunctionNames();
funcNames.remove("reflect");
// Set.toString() yields "[a, b, c]"; the permission setup apparently
// tolerates the brackets and spaces — see testMalformattedListProperty.
FunctionRegistry.setupPermissionsForBuiltinUDFs(funcNames.toString(),"");
assertEquals("substr",FunctionRegistry.getFunctionInfo("substr").getDisplayName());
}
EqualityVerifier ExceptionVerifier HybridVerifier
/**
 * Verify that a UDF in the blacklist can't be accessed: looking up
 * "reflect" should throw SemanticException before the assert runs.
 * @throws Exception
 */
@Test(expected=SemanticException.class) public void testUdfInBlackList() throws Exception {
FunctionRegistry.setupPermissionsForBuiltinUDFs("","reflect");
assertEquals("reflect",FunctionRegistry.getFunctionInfo("reflect").getDisplayName());
}
EqualityVerifier ExceptionVerifier HybridVerifier
/**
 * Verify that a UDF listed in both the whitelist and the blacklist can't
 * be accessed — the blacklist wins.
 * @throws Exception
 */
@Test(expected=SemanticException.class) public void testUdfInBlackAndWhiteList() throws Exception {
FunctionRegistry.setupPermissionsForBuiltinUDFs("reflect","reflect");
assertEquals("reflect",FunctionRegistry.getFunctionInfo("reflect").getDisplayName());
}
EqualityVerifier ExceptionVerifier HybridVerifier
/**
 * Verify that a UDF not in the (non-empty) whitelist can't be accessed:
 * "reflect" is removed from the name set before it is installed as the
 * whitelist, so the lookup should throw SemanticException.
 * @throws Exception
 */
@Test(expected=SemanticException.class) public void testUdfNotInWhiteList() throws Exception {
Set funcNames=FunctionRegistry.getFunctionNames();
funcNames.remove("reflect");
FunctionRegistry.setupPermissionsForBuiltinUDFs(funcNames.toString(),"");
assertEquals("reflect",FunctionRegistry.getFunctionInfo("reflect").getDisplayName());
}
EqualityVerifier ExceptionVerifier HybridVerifier
/**
 * Test malformatted udf list settings: empty entries and stray whitespace
 * in the comma-separated lists must still be parsed, and "reflect" in the
 * blacklist must still be blocked (SemanticException on lookup).
 */
@Test(expected=SemanticException.class) public void testMalformattedListProperty() throws Exception {
FunctionRegistry.setupPermissionsForBuiltinUDFs(",,"," ,reflect,");
assertEquals("reflect",FunctionRegistry.getFunctionInfo("reflect").getDisplayName());
}
Class: org.apache.hadoop.hive.ql.udf.TestToInteger InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * UDFToInteger on Text input: parses plain integers, returns null for
 * non-numeric text, and truncates a decimal string toward zero.
 */
@Test public void testTextToInteger() throws Exception {
UDFToInteger converter=new UDFToInteger();
// Plain negative and zero values parse directly.
Text negativeText=new Text("-1");
IntWritable negativeResult=converter.evaluate(negativeText);
assertEquals(-1,negativeResult.get());
Text zeroText=new Text("0");
IntWritable zeroResult=converter.evaluate(zeroText);
assertEquals(0,zeroResult.get());
// Non-numeric input yields null rather than throwing.
Text alphaText=new Text("A");
IntWritable alphaResult=converter.evaluate(alphaText);
assertNull(alphaResult);
// A decimal string is truncated toward zero.
Text decimalText=new Text("1.1");
IntWritable decimalResult=converter.evaluate(decimalText);
assertEquals(1,decimalResult.get());
}
Class: org.apache.hadoop.hive.ql.udf.TestUDFMath InternalCallVerifier EqualityVerifier
/** degrees(pi/4) == 45 within 1e-6 ({@code input} is a shared field set via createDecimal). */
@Test public void testDegrees() throws HiveException {
UDFDegrees udf=new UDFDegrees();
input=createDecimal("0.7853981633974483");
DoubleWritable res=udf.evaluate(input);
Assert.assertEquals(45.0,res.get(),0.000001);
}
InternalCallVerifier EqualityVerifier
/** ln(e^2) == 2 within 1e-6. */
@Test public void testLn() throws HiveException {
UDFLn udf=new UDFLn();
input=createDecimal("7.38905609893065");
DoubleWritable res=udf.evaluate(input);
Assert.assertEquals(2.0,res.get(),0.000001);
}
InternalCallVerifier EqualityVerifier
/** log2(8) == 3 within 1e-6. */
@Test public void testLog2() throws HiveException {
UDFLog2 udf=new UDFLog2();
input=createDecimal("8.0");
DoubleWritable res=udf.evaluate(input);
Assert.assertEquals(3.0,res.get(),0.000001);
}
InternalCallVerifier EqualityVerifier
/** sqrt(49) == 7 within 1e-6. */
@Test public void testSqrt() throws HiveException {
UDFSqrt udf=new UDFSqrt();
input=createDecimal("49.0");
DoubleWritable res=udf.evaluate(input);
Assert.assertEquals(7.0,res.get(),0.000001);
}
InternalCallVerifier EqualityVerifier
/** exp(2) == e^2 within 1e-6. */
@Test public void testExp() throws HiveException {
UDFExp udf=new UDFExp();
input=createDecimal("2.0");
DoubleWritable res=udf.evaluate(input);
Assert.assertEquals(7.38905609893065,res.get(),0.000001);
}
InternalCallVerifier EqualityVerifier
/** tan(pi/4) == 1 within 1e-6. */
@Test public void testTan() throws HiveException {
UDFTan udf=new UDFTan();
input=createDecimal("0.7853981633974483");
DoubleWritable res=udf.evaluate(input);
Assert.assertEquals(1.0,res.get(),0.000001);
}
InternalCallVerifier EqualityVerifier
/** cos(0.7727...) == 0.716 within 1e-6 (inverse pair of testAcos). */
@Test public void testCos() throws HiveException {
UDFCos udf=new UDFCos();
input=createDecimal("0.7727408115633954");
DoubleWritable res=udf.evaluate(input);
Assert.assertEquals(0.716,res.get(),0.000001);
}
InternalCallVerifier EqualityVerifier
/** atan(1) == pi/4 within 1e-6. */
@Test public void testAtan() throws HiveException {
UDFAtan udf=new UDFAtan();
input=createDecimal("1.0");
DoubleWritable res=udf.evaluate(input);
Assert.assertEquals(0.7853981633974483,res.get(),0.000001);
}
InternalCallVerifier EqualityVerifier
/** acos(0.716) == 0.7727... within 1e-6 (inverse pair of testCos). */
@Test public void testAcos() throws HiveException {
UDFAcos udf=new UDFAcos();
input=createDecimal("0.716");
DoubleWritable res=udf.evaluate(input);
Assert.assertEquals(0.7727408115633954,res.get(),0.000001);
}
InternalCallVerifier EqualityVerifier
/** Natural log ln(e^2) == 2, and the two-arg form log base 3 of 9 == 2, within 1e-6. */
@Test public void testLog() throws HiveException {
UDFLog udf=new UDFLog();
input=createDecimal("7.38905609893065");
DoubleWritable res=udf.evaluate(input);
Assert.assertEquals(2.0,res.get(),0.000001);
// Two-argument overload: evaluate(base, value).
res=udf.evaluate(createDecimal("3.0"),createDecimal("9.0"));
Assert.assertEquals(2.0,res.get(),0.000001);
}
InternalCallVerifier EqualityVerifier
/** log10(100) == 2 within 1e-6. */
@Test public void testLog10() throws HiveException {
UDFLog10 udf=new UDFLog10();
input=createDecimal("100.0");
DoubleWritable res=udf.evaluate(input);
Assert.assertEquals(2.0,res.get(),0.000001);
}
InternalCallVerifier EqualityVerifier
/** sin(0.7980...) == 0.716 within 1e-6 (inverse pair of testAsin). */
@Test public void testSin() throws HiveException {
UDFSin udf=new UDFSin();
input=createDecimal("0.7980555152315012");
DoubleWritable res=udf.evaluate(input);
Assert.assertEquals(0.716,res.get(),0.000001);
}
InternalCallVerifier EqualityVerifier
/** asin(0.716) == 0.7980... within 1e-6 (inverse pair of testSin). */
@Test public void testAsin() throws HiveException {
UDFAsin udf=new UDFAsin();
input=createDecimal("0.716");
DoubleWritable res=udf.evaluate(input);
Assert.assertEquals(0.7980555152315012,res.get(),0.000001);
}
InternalCallVerifier EqualityVerifier
/** radians(45) == pi/4 within 1e-6 (inverse pair of testDegrees). */
@Test public void testRadians() throws HiveException {
UDFRadians udf=new UDFRadians();
input=createDecimal("45.0");
DoubleWritable res=udf.evaluate(input);
Assert.assertEquals(0.7853981633974483,res.get(),0.000001);
}
Class: org.apache.hadoop.hive.ql.udf.TestUDFSign InternalCallVerifier EqualityVerifier
/** UDFSign on HiveDecimal input: positive -> 1, negative -> -1, zero -> 0. */
@Test public void testDecimalSign() throws HiveException {
UDFSign udf=new UDFSign();
HiveDecimalWritable input=new HiveDecimalWritable(HiveDecimal.create("32300.004747"));
IntWritable res=udf.evaluate(input);
Assert.assertEquals(1,res.get());
input=new HiveDecimalWritable(HiveDecimal.create("-30.047"));
res=udf.evaluate(input);
Assert.assertEquals(-1,res.get());
input=new HiveDecimalWritable(HiveDecimal.ZERO);
res=udf.evaluate(input);
Assert.assertEquals(0,res.get());
}
Class: org.apache.hadoop.hive.ql.udf.generic.TestGenericUDFBRound APIUtilityVerifier EqualityVerifier
/**
 * Cross-checks three banker's-rounding implementations on half-way and
 * near-half values: RoundUtils.bround, the vectorized MathExpr.bround,
 * and BigDecimal with ROUND_HALF_EVEN must all agree.
 */
@Test public void testMathExprBround() throws HiveException {
// 1.5/2.5 and -1.5/-2.5 exercise the round-half-to-even tie-breaking.
double[] vArr={1.5,2.5,-1.5,-2.5,1.49,1.51};
for ( double v : vArr) {
double v1=RoundUtils.bround(v,0);
double v2=MathExpr.bround(v);
Assert.assertEquals(v1,v2,0.00001);
double v3=BigDecimal.valueOf(v).setScale(0,ROUND_HALF_EVEN).doubleValue();
Assert.assertEquals(v3,v2,0.00001);
}
}
Class: org.apache.hadoop.hive.ql.udf.generic.TestGenericUDFCeil InternalCallVerifier EqualityVerifier
/** ceil of a double: initialize() reports long output, and 32300.004747 -> 32301. */
@Test public void testDouble() throws HiveException {
GenericUDFCeil udf=new GenericUDFCeil();
DoubleWritable input=new DoubleWritable(32300.004747);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableDoubleObjectInspector};
DeferredObject[] args={new DeferredJavaObject(input)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.longTypeInfo,oi.getTypeInfo());
LongWritable res=(LongWritable)udf.evaluate(args);
Assert.assertEquals(32301L,res.get());
}
InternalCallVerifier EqualityVerifier
/** ceil of a numeric string: coerced, long output, "32300.004747" -> 32301. */
@Test public void testString() throws HiveException {
GenericUDFCeil udf=new GenericUDFCeil();
Text input=new Text("32300.004747");
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableStringObjectInspector};
DeferredObject[] args={new DeferredJavaObject(input)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.longTypeInfo,oi.getTypeInfo());
LongWritable res=(LongWritable)udf.evaluate(args);
Assert.assertEquals(32301L,res.get());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/** ceil of a char(12) value: negative "-32300.004747" rounds up toward zero -> -32300. */
@Test public void testChar() throws HiveException {
GenericUDFCeil udf=new GenericUDFCeil();
HiveChar vc=new HiveChar("-32300.004747",12);
HiveCharWritable input=new HiveCharWritable(vc);
CharTypeInfo inputTypeInfo=TypeInfoFactory.getCharTypeInfo(12);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo)};
DeferredObject[] args={new DeferredJavaObject(input)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.longTypeInfo,oi.getTypeInfo());
LongWritable res=(LongWritable)udf.evaluate(args);
Assert.assertEquals(-32300L,res.get());
}
InternalCallVerifier EqualityVerifier
/** CEIL of a positive FLOAT rounds up, returned as LONG. */
@Test public void testFloat() throws HiveException {
GenericUDFCeil ceil=new GenericUDFCeil();
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableFloatObjectInspector};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)ceil.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.longTypeInfo,retOI.getTypeInfo());
DeferredObject[] deferred={new DeferredJavaObject(new FloatWritable(323.4747f))};
LongWritable rounded=(LongWritable)ceil.evaluate(deferred);
Assert.assertEquals(324L,rounded.get());
}
InternalCallVerifier EqualityVerifier
/** CEIL of a SHORT is the identity on integral input, widened to LONG. */
@Test public void testShort() throws HiveException {
GenericUDFCeil ceil=new GenericUDFCeil();
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableShortObjectInspector};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)ceil.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.longTypeInfo,retOI.getTypeInfo());
DeferredObject[] deferred={new DeferredJavaObject(new ShortWritable((short)-74))};
LongWritable rounded=(LongWritable)ceil.evaluate(deferred);
Assert.assertEquals(-74L,rounded.get());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/** CEIL of DECIMAL(11,6) rounds up and drops the scale, producing DECIMAL(6,0). */
@Test public void testDecimal() throws HiveException {
GenericUDFCeil ceil=new GenericUDFCeil();
DecimalTypeInfo decType=TypeInfoFactory.getDecimalTypeInfo(11,6);
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(decType)};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)ceil.initialize(argOIs);
// the fractional digits disappear; one extra integer digit covers a possible carry
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(6,0),retOI.getTypeInfo());
HiveDecimalWritable arg=new HiveDecimalWritable(HiveDecimal.create("32300.004747"));
DeferredObject[] deferred={new DeferredJavaObject(arg)};
HiveDecimalWritable rounded=(HiveDecimalWritable)ceil.evaluate(deferred);
Assert.assertEquals(HiveDecimal.create("32301"),rounded.getHiveDecimal());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/** CEIL of a numeric VARCHAR(12) parses the text and rounds up, returned as LONG. */
@Test public void testVarchar() throws HiveException {
GenericUDFCeil ceil=new GenericUDFCeil();
VarcharTypeInfo vcType=TypeInfoFactory.getVarcharTypeInfo(12);
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(vcType)};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)ceil.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.longTypeInfo,retOI.getTypeInfo());
HiveVarcharWritable arg=new HiveVarcharWritable(new HiveVarchar("32300.004747",12));
DeferredObject[] deferred={new DeferredJavaObject(arg)};
LongWritable rounded=(LongWritable)ceil.evaluate(deferred);
Assert.assertEquals(32301L,rounded.get());
}
InternalCallVerifier EqualityVerifier
/** CEIL of a LONG is the identity. */
@Test public void testLong() throws HiveException {
GenericUDFCeil ceil=new GenericUDFCeil();
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableLongObjectInspector};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)ceil.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.longTypeInfo,retOI.getTypeInfo());
DeferredObject[] deferred={new DeferredJavaObject(new LongWritable(3234747))};
LongWritable rounded=(LongWritable)ceil.evaluate(deferred);
Assert.assertEquals(3234747L,rounded.get());
}
InternalCallVerifier EqualityVerifier
/** CEIL of an INT is the identity, widened to LONG. */
@Test public void testInt() throws HiveException {
GenericUDFCeil ceil=new GenericUDFCeil();
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableIntObjectInspector};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)ceil.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.longTypeInfo,retOI.getTypeInfo());
DeferredObject[] deferred={new DeferredJavaObject(new IntWritable(747))};
LongWritable rounded=(LongWritable)ceil.evaluate(deferred);
Assert.assertEquals(747L,rounded.get());
}
InternalCallVerifier EqualityVerifier
/** CEIL of a BYTE is the identity, widened to LONG. */
@Test public void testByte() throws HiveException {
GenericUDFCeil ceil=new GenericUDFCeil();
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableByteObjectInspector};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)ceil.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.longTypeInfo,retOI.getTypeInfo());
DeferredObject[] deferred={new DeferredJavaObject(new ByteWritable((byte)4))};
LongWritable rounded=(LongWritable)ceil.evaluate(deferred);
Assert.assertEquals(4L,rounded.get());
}
Class: org.apache.hadoop.hive.ql.udf.generic.TestGenericUDFFloor InternalCallVerifier EqualityVerifier
/** FLOOR of a LONG is the identity. */
@Test public void testLong() throws HiveException {
GenericUDFFloor floor=new GenericUDFFloor();
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableLongObjectInspector};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)floor.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.longTypeInfo,retOI.getTypeInfo());
DeferredObject[] deferred={new DeferredJavaObject(new LongWritable(3234747))};
LongWritable rounded=(LongWritable)floor.evaluate(deferred);
Assert.assertEquals(3234747L,rounded.get());
}
InternalCallVerifier EqualityVerifier
/** FLOOR of a negative INT is the identity, widened to LONG. */
@Test public void testInt() throws HiveException {
GenericUDFFloor floor=new GenericUDFFloor();
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableIntObjectInspector};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)floor.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.longTypeInfo,retOI.getTypeInfo());
DeferredObject[] deferred={new DeferredJavaObject(new IntWritable(-747))};
LongWritable rounded=(LongWritable)floor.evaluate(deferred);
Assert.assertEquals(-747L,rounded.get());
}
InternalCallVerifier EqualityVerifier
/** FLOOR of a BYTE is the identity, widened to LONG. */
@Test public void testByte() throws HiveException {
GenericUDFFloor floor=new GenericUDFFloor();
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableByteObjectInspector};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)floor.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.longTypeInfo,retOI.getTypeInfo());
DeferredObject[] deferred={new DeferredJavaObject(new ByteWritable((byte)4))};
LongWritable rounded=(LongWritable)floor.evaluate(deferred);
Assert.assertEquals(4L,rounded.get());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/** FLOOR of a numeric VARCHAR(12) parses the text and rounds down, returned as LONG. */
@Test public void testVarchar() throws HiveException {
GenericUDFFloor floor=new GenericUDFFloor();
VarcharTypeInfo vcType=TypeInfoFactory.getVarcharTypeInfo(12);
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(vcType)};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)floor.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.longTypeInfo,retOI.getTypeInfo());
HiveVarcharWritable arg=new HiveVarcharWritable(new HiveVarchar("32300.004747",12));
DeferredObject[] deferred={new DeferredJavaObject(arg)};
LongWritable rounded=(LongWritable)floor.evaluate(deferred);
Assert.assertEquals(32300L,rounded.get());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/** FLOOR of DECIMAL(11,6) rounds down and drops the scale, producing DECIMAL(6,0). */
@Test public void testDecimal() throws HiveException {
GenericUDFFloor floor=new GenericUDFFloor();
DecimalTypeInfo decType=TypeInfoFactory.getDecimalTypeInfo(11,6);
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(decType)};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)floor.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(6,0),retOI.getTypeInfo());
HiveDecimalWritable arg=new HiveDecimalWritable(HiveDecimal.create("32300.004747"));
DeferredObject[] deferred={new DeferredJavaObject(arg)};
HiveDecimalWritable rounded=(HiveDecimalWritable)floor.evaluate(deferred);
Assert.assertEquals(HiveDecimal.create("32300"),rounded.getHiveDecimal());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/** FLOOR of a numeric CHAR(12) parses the text and rounds down, returned as LONG. */
@Test public void testChar() throws HiveException {
GenericUDFFloor floor=new GenericUDFFloor();
CharTypeInfo charType=TypeInfoFactory.getCharTypeInfo(12);
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(charType)};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)floor.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.longTypeInfo,retOI.getTypeInfo());
HiveCharWritable arg=new HiveCharWritable(new HiveChar("32300.004747",12));
DeferredObject[] deferred={new DeferredJavaObject(arg)};
LongWritable rounded=(LongWritable)floor.evaluate(deferred);
Assert.assertEquals(32300L,rounded.get());
}
InternalCallVerifier EqualityVerifier
/** FLOOR of a numeric STRING parses the text and rounds down, returned as LONG. */
@Test public void testString() throws HiveException {
GenericUDFFloor floor=new GenericUDFFloor();
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableStringObjectInspector};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)floor.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.longTypeInfo,retOI.getTypeInfo());
DeferredObject[] deferred={new DeferredJavaObject(new Text("32300.004747"))};
LongWritable rounded=(LongWritable)floor.evaluate(deferred);
Assert.assertEquals(32300L,rounded.get());
}
InternalCallVerifier EqualityVerifier
/** FLOOR of a positive DOUBLE rounds down to the previous whole number, returned as LONG. */
@Test public void testDouble() throws HiveException {
GenericUDFFloor floor=new GenericUDFFloor();
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableDoubleObjectInspector};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)floor.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.longTypeInfo,retOI.getTypeInfo());
DeferredObject[] deferred={new DeferredJavaObject(new DoubleWritable(32300.004747))};
LongWritable rounded=(LongWritable)floor.evaluate(deferred);
Assert.assertEquals(32300L,rounded.get());
}
InternalCallVerifier EqualityVerifier
/** FLOOR of a SHORT is the identity, widened to LONG. */
@Test public void testShort() throws HiveException {
GenericUDFFloor floor=new GenericUDFFloor();
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableShortObjectInspector};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)floor.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.longTypeInfo,retOI.getTypeInfo());
DeferredObject[] deferred={new DeferredJavaObject(new ShortWritable((short)74))};
LongWritable rounded=(LongWritable)floor.evaluate(deferred);
Assert.assertEquals(74L,rounded.get());
}
InternalCallVerifier EqualityVerifier
/** FLOOR of a negative FLOAT rounds away from zero, returned as LONG. */
@Test public void testFloat() throws HiveException {
GenericUDFFloor floor=new GenericUDFFloor();
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableFloatObjectInspector};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)floor.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.longTypeInfo,retOI.getTypeInfo());
DeferredObject[] deferred={new DeferredJavaObject(new FloatWritable(-323.4747f))};
LongWritable rounded=(LongWritable)floor.evaluate(deferred);
// floor(-323.4747) == -324: rounding down moves a negative value away from zero
Assert.assertEquals(-324L,rounded.get());
}
Class: org.apache.hadoop.hive.ql.udf.generic.TestGenericUDFMacro BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Exercises GenericUDFMacro end to end: evaluates the macro, then checks the
 * determinism/statefulness flags, the accessors for the macro metadata, and the
 * rendered display string.
 *
 * NOTE(review): relies on fixture fields (udf, name, bodyDesc, colNames, colTypes,
 * inspectors, arguments) initialized elsewhere in this test class — not visible here.
 */
@Test public void testUDF() throws Exception {
udf=new GenericUDFMacro(name,bodyDesc,colNames,colTypes);
udf.initialize(inspectors);
Object actual=udf.evaluate(arguments);
// the evaluated result is expected to match the body descriptor's value (int-valued body)
Assert.assertEquals(bodyDesc.getValue(),((IntWritable)actual).get());
Assert.assertTrue(udf.isDeterministic());
Assert.assertFalse(udf.isStateful());
// constructor arguments must round-trip through the accessors
Assert.assertEquals(name,udf.getMacroName());
Assert.assertEquals(bodyDesc,udf.getBody());
Assert.assertEquals(colNames,udf.getColNames());
Assert.assertEquals(colTypes,udf.getColTypes());
Assert.assertEquals(name + "(x, y)",udf.getDisplayString(new String[]{"x","y"}));
}
Class: org.apache.hadoop.hive.ql.udf.generic.TestGenericUDFOPDivide InternalCallVerifier EqualityVerifier
/**
 * VARCHAR / INT: both operands are implicitly converted to double, so the result
 * type is DOUBLE and the value is the plain floating-point quotient.
 */
@Test public void testVarcharDivideInt() throws HiveException {
GenericUDFOPDivide udf=new GenericUDFOPDivide();
HiveVarcharWritable left=new HiveVarcharWritable();
left.set("123");
IntWritable right=new IntWritable(456);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector,PrimitiveObjectInspectorFactory.writableIntObjectInspector};
DeferredObject[] args={new DeferredJavaObject(left),new DeferredJavaObject(right)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
// expected value first, to match assertEquals(expected, actual) convention
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo,oi.getTypeInfo());
DoubleWritable res=(DoubleWritable)udf.evaluate(args);
// primitive-double overload with zero delta replaces the deprecated new Double(...)
// boxing constructor while preserving exact-equality semantics
Assert.assertEquals(123.0 / 456.0,res.get(),0.0);
}
InternalCallVerifier EqualityVerifier
/**
 * DOUBLE / DECIMAL: the decimal operand is promoted to double, so the result is
 * a plain DOUBLE quotient.
 * NOTE: method name carries a historical typo ("Doule"); kept to preserve the
 * public test name.
 */
@Test public void testDouleDivideDecimal() throws HiveException {
GenericUDFOPDivide udf=new GenericUDFOPDivide();
DoubleWritable left=new DoubleWritable(74.52);
HiveDecimalWritable right=new HiveDecimalWritable(HiveDecimal.create("234.97"));
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5,2))};
DeferredObject[] args={new DeferredJavaObject(left),new DeferredJavaObject(right)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo,oi.getTypeInfo());
DoubleWritable res=(DoubleWritable)udf.evaluate(args);
// primitive-double overload with zero delta replaces the deprecated new Double(...)
// boxing constructor while preserving exact-equality semantics
Assert.assertEquals(74.52 / 234.97,res.get(),0.0);
}
InternalCallVerifier EqualityVerifier
/**
 * DOUBLE / LONG: the long operand is promoted to double, so the result is a
 * plain DOUBLE quotient.
 */
@Test public void testDoubleDivideLong() throws HiveException {
GenericUDFOPDivide udf=new GenericUDFOPDivide();
DoubleWritable left=new DoubleWritable(4.5);
LongWritable right=new LongWritable(10);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,PrimitiveObjectInspectorFactory.writableLongObjectInspector};
DeferredObject[] args={new DeferredJavaObject(left),new DeferredJavaObject(right)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo,oi.getTypeInfo());
DoubleWritable res=(DoubleWritable)udf.evaluate(args);
// primitive-double overload with zero delta replaces the deprecated new Double(...)
// boxing constructor while preserving exact-equality semantics
Assert.assertEquals(0.45,res.get(),0.0);
}
InternalCallVerifier EqualityVerifier
/** DECIMAL(3,1) / DECIMAL(5,2) produces DECIMAL(11,7) and the exact decimal quotient. */
@Test public void testDecimalDivideDecimal() throws HiveException {
GenericUDFOPDivide divide=new GenericUDFOPDivide();
ObjectInspector leftOI=PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(3,1));
ObjectInspector rightOI=PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5,2));
ObjectInspector[] argOIs={leftOI,rightOI};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)divide.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(11,7),retOI.getTypeInfo());
HiveDecimalWritable dividend=new HiveDecimalWritable(HiveDecimal.create("14.5"));
HiveDecimalWritable divisor=new HiveDecimalWritable(HiveDecimal.create("234.97"));
DeferredObject[] deferred={new DeferredJavaObject(dividend),new DeferredJavaObject(divisor)};
HiveDecimalWritable quotient=(HiveDecimalWritable)divide.evaluate(deferred);
Assert.assertEquals(HiveDecimal.create("0.06171"),quotient.getHiveDecimal());
}
InternalCallVerifier EqualityVerifier
/** Dividing two DECIMAL(5,2) operands resolves to a DECIMAL(13,8) result type. */
@Test public void testDecimalDivideDecimalSameParams() throws HiveException {
GenericUDFOPDivide divide=new GenericUDFOPDivide();
ObjectInspector decOI=PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5,2));
ObjectInspector[] argOIs={decOI,decOI};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)divide.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(13,8),retOI.getTypeInfo());
}
InternalCallVerifier EqualityVerifier
/**
 * FLOAT / FLOAT: both operands are promoted to double, so the result type is
 * DOUBLE and the value is the plain floating-point quotient.
 */
@Test public void testFloatDivideFloat() throws HiveException {
GenericUDFOPDivide udf=new GenericUDFOPDivide();
FloatWritable f1=new FloatWritable(4.5f);
FloatWritable f2=new FloatWritable(1.5f);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableFloatObjectInspector,PrimitiveObjectInspectorFactory.writableFloatObjectInspector};
DeferredObject[] args={new DeferredJavaObject(f1),new DeferredJavaObject(f2)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
// expected value first, to match assertEquals(expected, actual) convention
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo,oi.getTypeInfo());
DoubleWritable res=(DoubleWritable)udf.evaluate(args);
// primitive-double overload with zero delta replaces the deprecated new Double(...)
// boxing constructor while preserving exact-equality semantics
Assert.assertEquals(3.0,res.get(),0.0);
}
InternalCallVerifier EqualityVerifier
/**
 * BYTE / SHORT: integer division in Hive yields an exact DECIMAL(9,6), not an
 * integral type, so 4 / 6 evaluates to 0.666667.
 */
@Test public void testByteDivideShort() throws HiveException {
GenericUDFOPDivide udf=new GenericUDFOPDivide();
ByteWritable left=new ByteWritable((byte)4);
ShortWritable right=new ShortWritable((short)6);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableByteObjectInspector,PrimitiveObjectInspectorFactory.writableShortObjectInspector};
DeferredObject[] args={new DeferredJavaObject(left),new DeferredJavaObject(right)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
// expected value first, to match assertEquals(expected, actual) convention
// used elsewhere in this class
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(9,6),oi.getTypeInfo());
HiveDecimalWritable res=(HiveDecimalWritable)udf.evaluate(args);
Assert.assertEquals(HiveDecimal.create("0.666667"),res.getHiveDecimal());
}
InternalCallVerifier EqualityVerifier
/** LONG / DECIMAL(9,4) resolves to DECIMAL(33,10) and yields the exact decimal quotient. */
@Test public void testLongDivideDecimal() throws HiveException {
GenericUDFOPDivide divide=new GenericUDFOPDivide();
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableLongObjectInspector,PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(9,4))};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)divide.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(33,10),retOI.getTypeInfo());
LongWritable dividend=new LongWritable(104);
HiveDecimalWritable divisor=new HiveDecimalWritable(HiveDecimal.create("234.97"));
DeferredObject[] deferred={new DeferredJavaObject(dividend),new DeferredJavaObject(divisor)};
HiveDecimalWritable quotient=(HiveDecimalWritable)divide.evaluate(deferred);
Assert.assertEquals(HiveDecimal.create("0.4426096949"),quotient.getHiveDecimal());
}
InternalCallVerifier EqualityVerifier
/** DECIMAL(1,0) / DECIMAL(2,0) resolves to DECIMAL(7,6); 5/25 yields 0.2 exactly. */
@Test public void testDecimalDivideDecimal2() throws HiveException {
GenericUDFOPDivide divide=new GenericUDFOPDivide();
ObjectInspector leftOI=PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(1,0));
ObjectInspector rightOI=PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(2,0));
ObjectInspector[] argOIs={leftOI,rightOI};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)divide.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(7,6),retOI.getTypeInfo());
HiveDecimalWritable dividend=new HiveDecimalWritable(HiveDecimal.create("5"));
HiveDecimalWritable divisor=new HiveDecimalWritable(HiveDecimal.create("25"));
DeferredObject[] deferred={new DeferredJavaObject(dividend),new DeferredJavaObject(divisor)};
HiveDecimalWritable quotient=(HiveDecimalWritable)divide.evaluate(deferred);
Assert.assertEquals(HiveDecimal.create("0.2"),quotient.getHiveDecimal());
}
Class: org.apache.hadoop.hive.ql.udf.generic.TestGenericUDFOPMinus InternalCallVerifier EqualityVerifier
/** INTERVAL_YEAR_MONTH - INTERVAL_YEAR_MONTH: 3-1 minus 1-2 is 1-11. */
@Test public void testIntervalYearMonthMinusIntervalYearMonth() throws Exception {
GenericUDFOPMinus minus=new GenericUDFOPMinus();
ObjectInspector ymOI=PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector;
ObjectInspector[] argOIs={ymOI,ymOI};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)minus.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.intervalYearMonthTypeInfo,retOI.getTypeInfo());
HiveIntervalYearMonthWritable minuend=new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("3-1"));
HiveIntervalYearMonthWritable subtrahend=new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("1-2"));
DeferredObject[] deferred={new DeferredJavaObject(minuend),new DeferredJavaObject(subtrahend)};
HiveIntervalYearMonthWritable diff=(HiveIntervalYearMonthWritable)minus.evaluate(deferred);
Assert.assertEquals(HiveIntervalYearMonth.valueOf("1-11"),diff.getHiveIntervalYearMonth());
}
InternalCallVerifier EqualityVerifier
/** Subtracting two DECIMAL(5,2) operands resolves to DECIMAL(6,2) (one carry digit). */
@Test public void testDecimalMinusDecimalSameParams() throws HiveException {
GenericUDFOPMinus minus=new GenericUDFOPMinus();
ObjectInspector decOI=PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5,2));
ObjectInspector[] argOIs={decOI,decOI};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)minus.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(6,2),retOI.getTypeInfo());
}
InternalCallVerifier EqualityVerifier
/**
 * FLOAT - FLOAT stays FLOAT; subtracting zero returns the left operand unchanged.
 */
@Test public void testFloatMinusFloat() throws HiveException {
GenericUDFOPMinus udf=new GenericUDFOPMinus();
FloatWritable f1=new FloatWritable(4.5f);
FloatWritable f2=new FloatWritable(0.0f);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableFloatObjectInspector,PrimitiveObjectInspectorFactory.writableFloatObjectInspector};
DeferredObject[] args={new DeferredJavaObject(f1),new DeferredJavaObject(f2)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
// expected value first, to match assertEquals(expected, actual) convention
Assert.assertEquals(TypeInfoFactory.floatTypeInfo,oi.getTypeInfo());
FloatWritable res=(FloatWritable)udf.evaluate(args);
// primitive-float overload with zero delta replaces the deprecated new Float(...)
// boxing constructor while preserving exact-equality semantics
Assert.assertEquals(4.5f,res.get(),0.0f);
}
InternalCallVerifier EqualityVerifier
/** TIMESTAMP - INTERVAL_YEAR_MONTH shifts the date back 2 years 2 months; time-of-day and nanos survive. */
@Test public void testTimestampMinusIntervalYearMonth() throws Exception {
GenericUDFOPMinus minus=new GenericUDFOPMinus();
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)minus.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.timestampTypeInfo,retOI.getTypeInfo());
TimestampWritable ts=new TimestampWritable(Timestamp.valueOf("2004-01-15 01:02:03.123456789"));
HiveIntervalYearMonthWritable span=new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2"));
DeferredObject[] deferred={new DeferredJavaObject(ts),new DeferredJavaObject(span)};
TimestampWritable shifted=(TimestampWritable)minus.evaluate(deferred);
Assert.assertEquals(Timestamp.valueOf("2001-11-15 01:02:03.123456789"),shifted.getTimestamp());
}
InternalCallVerifier EqualityVerifier
/** DATE - INTERVAL_DAY_TIME promotes to TIMESTAMP so sub-day precision is kept. */
@Test public void testDateMinusIntervalDayTime() throws Exception {
GenericUDFOPMinus minus=new GenericUDFOPMinus();
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableDateObjectInspector,PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)minus.initialize(argOIs);
// result is a timestamp, not a date, because the interval carries time-of-day
Assert.assertEquals(TypeInfoFactory.timestampTypeInfo,retOI.getTypeInfo());
DateWritable day=new DateWritable(Date.valueOf("2001-01-01"));
HiveIntervalDayTimeWritable span=new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 0:0:0.555"));
DeferredObject[] deferred={new DeferredJavaObject(day),new DeferredJavaObject(span)};
TimestampWritable shifted=(TimestampWritable)minus.evaluate(deferred);
Assert.assertEquals(Timestamp.valueOf("2000-12-30 23:59:59.445"),shifted.getTimestamp());
}
InternalCallVerifier EqualityVerifier
/** LONG - DECIMAL(9,4) resolves to DECIMAL(24,4); 104 - 234.97 is -130.97. */
@Test public void testLongMinusDecimal() throws HiveException {
GenericUDFOPMinus minus=new GenericUDFOPMinus();
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableLongObjectInspector,PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(9,4))};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)minus.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(24,4),retOI.getTypeInfo());
LongWritable minuend=new LongWritable(104);
HiveDecimalWritable subtrahend=new HiveDecimalWritable(HiveDecimal.create("234.97"));
DeferredObject[] deferred={new DeferredJavaObject(minuend),new DeferredJavaObject(subtrahend)};
HiveDecimalWritable diff=(HiveDecimalWritable)minus.evaluate(deferred);
Assert.assertEquals(HiveDecimal.create("-130.97"),diff.getHiveDecimal());
}
InternalCallVerifier EqualityVerifier
/**
 * DOUBLE - DECIMAL: the decimal operand is promoted to double, so the result is
 * a plain DOUBLE difference.
 * NOTE: method name carries a historical typo ("Doule"); kept to preserve the
 * public test name.
 */
@Test public void testDouleMinusDecimal() throws HiveException {
GenericUDFOPMinus udf=new GenericUDFOPMinus();
DoubleWritable left=new DoubleWritable(74.52);
HiveDecimalWritable right=new HiveDecimalWritable(HiveDecimal.create("234.97"));
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5,2))};
DeferredObject[] args={new DeferredJavaObject(left),new DeferredJavaObject(right)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo,oi.getTypeInfo());
DoubleWritable res=(DoubleWritable)udf.evaluate(args);
// primitive-double overload with zero delta replaces the deprecated new Double(...)
// boxing constructor while preserving exact-equality semantics
Assert.assertEquals(-160.45,res.get(),0.0);
}
InternalCallVerifier EqualityVerifier
/**
 * BYTE - SHORT widens to SHORT; 4 - 6 evaluates to -2.
 */
@Test public void testByteMinusShort() throws HiveException {
GenericUDFOPMinus udf=new GenericUDFOPMinus();
ByteWritable left=new ByteWritable((byte)4);
ShortWritable right=new ShortWritable((short)6);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableByteObjectInspector,PrimitiveObjectInspectorFactory.writableShortObjectInspector};
DeferredObject[] args={new DeferredJavaObject(left),new DeferredJavaObject(right)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
// expected value first, to match assertEquals(expected, actual) convention
// used elsewhere in this class
Assert.assertEquals(TypeInfoFactory.shortTypeInfo,oi.getTypeInfo());
ShortWritable res=(ShortWritable)udf.evaluate(args);
Assert.assertEquals(-2,res.get());
}
InternalCallVerifier EqualityVerifier
/** INTERVAL_DAY_TIME - INTERVAL_DAY_TIME: "2 2:3:4.567" minus "1 2:3:4" is "1 0:0:0.567". */
@Test public void testIntervalDayTimeMinusIntervalDayTime() throws Exception {
GenericUDFOPMinus minus=new GenericUDFOPMinus();
ObjectInspector dtOI=PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector;
ObjectInspector[] argOIs={dtOI,dtOI};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)minus.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.intervalDayTimeTypeInfo,retOI.getTypeInfo());
HiveIntervalDayTimeWritable minuend=new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("2 2:3:4.567"));
HiveIntervalDayTimeWritable subtrahend=new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4"));
DeferredObject[] deferred={new DeferredJavaObject(minuend),new DeferredJavaObject(subtrahend)};
HiveIntervalDayTimeWritable diff=(HiveIntervalDayTimeWritable)minus.evaluate(deferred);
Assert.assertEquals(HiveIntervalDayTime.valueOf("1 0:0:0.567"),diff.getHiveIntervalDayTime());
}
InternalCallVerifier EqualityVerifier
/** TIMESTAMP - INTERVAL_DAY_TIME subtracts the full day/time span, landing on an exact second. */
@Test public void testTimestampMinusIntervalDayTime() throws Exception {
GenericUDFOPMinus minus=new GenericUDFOPMinus();
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)minus.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.timestampTypeInfo,retOI.getTypeInfo());
TimestampWritable ts=new TimestampWritable(Timestamp.valueOf("2001-01-02 2:3:4.567"));
HiveIntervalDayTimeWritable span=new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
DeferredObject[] deferred={new DeferredJavaObject(ts),new DeferredJavaObject(span)};
TimestampWritable shifted=(TimestampWritable)minus.evaluate(deferred);
Assert.assertEquals(Timestamp.valueOf("2001-01-01 00:00:00"),shifted.getTimestamp());
}
InternalCallVerifier EqualityVerifier
/** DATE - INTERVAL_YEAR_MONTH stays a DATE: 2004-02-15 minus 2-8 is 2001-06-15. */
@Test public void testDateMinusIntervalYearMonth() throws Exception {
GenericUDFOPMinus minus=new GenericUDFOPMinus();
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableDateObjectInspector,PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)minus.initialize(argOIs);
// a year-month interval has no time component, so the result remains a date
Assert.assertEquals(TypeInfoFactory.dateTypeInfo,retOI.getTypeInfo());
DateWritable day=new DateWritable(Date.valueOf("2004-02-15"));
HiveIntervalYearMonthWritable span=new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8"));
DeferredObject[] deferred={new DeferredJavaObject(day),new DeferredJavaObject(span)};
DateWritable shifted=(DateWritable)minus.evaluate(deferred);
Assert.assertEquals(Date.valueOf("2001-06-15"),shifted.get());
}
InternalCallVerifier EqualityVerifier
/** DECIMAL(3,1) - DECIMAL(5,2) resolves to DECIMAL(6,2); 14.5 - 234.97 is -220.47. */
@Test public void testDecimalMinusDecimal() throws HiveException {
GenericUDFOPMinus minus=new GenericUDFOPMinus();
ObjectInspector leftOI=PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(3,1));
ObjectInspector rightOI=PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5,2));
ObjectInspector[] argOIs={leftOI,rightOI};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)minus.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(6,2),retOI.getTypeInfo());
HiveDecimalWritable minuend=new HiveDecimalWritable(HiveDecimal.create("14.5"));
HiveDecimalWritable subtrahend=new HiveDecimalWritable(HiveDecimal.create("234.97"));
DeferredObject[] deferred={new DeferredJavaObject(minuend),new DeferredJavaObject(subtrahend)};
HiveDecimalWritable diff=(HiveDecimalWritable)minus.evaluate(deferred);
Assert.assertEquals(HiveDecimal.create("-220.47"),diff.getHiveDecimal());
}
InternalCallVerifier EqualityVerifier
/**
 * DOUBLE - LONG: the long operand is promoted to double, so the result is a
 * plain DOUBLE difference.
 */
@Test public void testDoubleMinusLong() throws HiveException {
GenericUDFOPMinus udf=new GenericUDFOPMinus();
DoubleWritable left=new DoubleWritable(4.5);
LongWritable right=new LongWritable(10);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,PrimitiveObjectInspectorFactory.writableLongObjectInspector};
DeferredObject[] args={new DeferredJavaObject(left),new DeferredJavaObject(right)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo,oi.getTypeInfo());
DoubleWritable res=(DoubleWritable)udf.evaluate(args);
// primitive-double overload with zero delta replaces the deprecated new Double(...)
// boxing constructor while preserving exact-equality semantics
Assert.assertEquals(-5.5,res.get(),0.0);
}
InternalCallVerifier EqualityVerifier
/**
 * VARCHAR - INT: both operands are converted to double, so the result type is
 * DOUBLE and the value is the plain floating-point difference.
 */
@Test public void testVarcharMinusInt() throws HiveException {
GenericUDFOPMinus udf=new GenericUDFOPMinus();
HiveVarcharWritable left=new HiveVarcharWritable();
left.set("123");
IntWritable right=new IntWritable(456);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector,PrimitiveObjectInspectorFactory.writableIntObjectInspector};
DeferredObject[] args={new DeferredJavaObject(left),new DeferredJavaObject(right)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
// expected value first, to match assertEquals(expected, actual) convention
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo,oi.getTypeInfo());
DoubleWritable res=(DoubleWritable)udf.evaluate(args);
// primitive-double overload with zero delta replaces the deprecated new Double(...)
// boxing constructor while preserving exact-equality semantics
Assert.assertEquals(-333.0,res.get(),0.0);
}
Class: org.apache.hadoop.hive.ql.udf.generic.TestGenericUDFOPMod InternalCallVerifier EqualityVerifier
/** MOD of two DECIMAL(5,2) operands keeps the same DECIMAL(5,2) result type. */
@Test public void testDecimalModDecimalSameParams() throws HiveException {
GenericUDFOPMod mod=new GenericUDFOPMod();
ObjectInspector decOI=PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5,2));
ObjectInspector[] argOIs={decOI,decOI};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)mod.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(5,2),retOI.getTypeInfo());
}
InternalCallVerifier EqualityVerifier
/** MOD of DECIMAL(3,1) by DECIMAL(5,2) resolves to DECIMAL(4,2). */
@Test public void testDecimalModDecimal() throws HiveException {
GenericUDFOPMod mod=new GenericUDFOPMod();
ObjectInspector leftOI=PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(3,1));
ObjectInspector rightOI=PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5,2));
ObjectInspector[] argOIs={leftOI,rightOI};
PrimitiveObjectInspector retOI=(PrimitiveObjectInspector)mod.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(4,2),retOI.getTypeInfo());
}
Class: org.apache.hadoop.hive.ql.udf.generic.TestGenericUDFOPMultiply InternalCallVerifier EqualityVerifier
/** long * decimal(9,4): result type is decimal(29,4) and 104 * 234.97 = 24436.88. */
@Test public void testLongTimesDecimal() throws HiveException {
GenericUDFOPMultiply multiply=new GenericUDFOPMultiply();
DeferredObject leftArg=new DeferredJavaObject(new LongWritable(104));
DeferredObject rightArg=new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("234.97")));
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableLongObjectInspector,PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(9,4))};
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)multiply.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(29,4),resultOI.getTypeInfo());
HiveDecimalWritable product=(HiveDecimalWritable)multiply.evaluate(new DeferredObject[]{leftArg,rightArg});
Assert.assertEquals(HiveDecimal.create("24436.88"),product.getHiveDecimal());
}
InternalCallVerifier EqualityVerifier
/** decimal(5,2) * decimal(5,2) must report decimal(11,4) as the result type. */
@Test public void testDecimalTimesDecimalSameParams() throws HiveException {
GenericUDFOPMultiply multiply=new GenericUDFOPMultiply();
ObjectInspector decimalOI=PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5,2));
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)multiply.initialize(new ObjectInspector[]{decimalOI,decimalOI});
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(11,4),resultOI.getTypeInfo());
}
InternalCallVerifier EqualityVerifier
/** decimal(3,1) * decimal(5,2): result type decimal(9,3); 14.5 * 234.97 = 3407.065. */
@Test public void testDecimalTimesDecimal() throws HiveException {
GenericUDFOPMultiply multiply=new GenericUDFOPMultiply();
DeferredObject leftArg=new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("14.5")));
DeferredObject rightArg=new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("234.97")));
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(3,1)),PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5,2))};
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)multiply.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(9,3),resultOI.getTypeInfo());
HiveDecimalWritable product=(HiveDecimalWritable)multiply.evaluate(new DeferredObject[]{leftArg,rightArg});
Assert.assertEquals(HiveDecimal.create("3407.065"),product.getHiveDecimal());
}
InternalCallVerifier EqualityVerifier
/**
 * double * decimal(5,2): the decimal operand is widened to double, so the
 * result type is double and 74.52 * 234.97 = 17509.9644.
 * NOTE: method name has a typo ("Doule"); kept to avoid churn in test reports.
 */
@Test public void testDouleTimesDecimal() throws HiveException {
GenericUDFOPMultiply udf=new GenericUDFOPMultiply();
DoubleWritable left=new DoubleWritable(74.52);
HiveDecimalWritable right=new HiveDecimalWritable(HiveDecimal.create("234.97"));
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5,2))};
DeferredObject[] args={new DeferredJavaObject(left),new DeferredJavaObject(right)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo,oi.getTypeInfo());
DoubleWritable res=(DoubleWritable)udf.evaluate(args);
// Double.valueOf avoids the deprecated Double(double) constructor.
Assert.assertEquals(Double.valueOf(17509.9644),Double.valueOf(res.get()));
}
InternalCallVerifier EqualityVerifier
/** float * float keeps the float result type; 4.5f * 0.0f = 0.0f. */
@Test public void testFloatTimesFloat() throws HiveException {
GenericUDFOPMultiply udf=new GenericUDFOPMultiply();
FloatWritable f1=new FloatWritable(4.5f);
FloatWritable f2=new FloatWritable(0.0f);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableFloatObjectInspector,PrimitiveObjectInspectorFactory.writableFloatObjectInspector};
DeferredObject[] args={new DeferredJavaObject(f1),new DeferredJavaObject(f2)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
// Expected value first, per the assertEquals(expected, actual) contract.
Assert.assertEquals(TypeInfoFactory.floatTypeInfo,oi.getTypeInfo());
FloatWritable res=(FloatWritable)udf.evaluate(args);
// Float.valueOf avoids the deprecated Float(float) constructor.
Assert.assertEquals(Float.valueOf(0.0f),Float.valueOf(res.get()));
}
InternalCallVerifier EqualityVerifier
/** double * long widens the long to double; 4.5 * 10 = 45.0. */
@Test public void testDoubleTimesLong() throws HiveException {
GenericUDFOPMultiply udf=new GenericUDFOPMultiply();
DoubleWritable left=new DoubleWritable(4.5);
LongWritable right=new LongWritable(10);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,PrimitiveObjectInspectorFactory.writableLongObjectInspector};
DeferredObject[] args={new DeferredJavaObject(left),new DeferredJavaObject(right)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo,oi.getTypeInfo());
DoubleWritable res=(DoubleWritable)udf.evaluate(args);
// Double.valueOf avoids the deprecated Double(double) constructor.
Assert.assertEquals(Double.valueOf(45.0),Double.valueOf(res.get()));
}
InternalCallVerifier EqualityVerifier
/** byte * short promotes to short; 4 * 6 = 24. */
@Test public void testByteTimesShort() throws HiveException {
GenericUDFOPMultiply udf=new GenericUDFOPMultiply();
ByteWritable left=new ByteWritable((byte)4);
ShortWritable right=new ShortWritable((short)6);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableByteObjectInspector,PrimitiveObjectInspectorFactory.writableShortObjectInspector};
DeferredObject[] args={new DeferredJavaObject(left),new DeferredJavaObject(right)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
// Expected value first, per the assertEquals(expected, actual) contract.
Assert.assertEquals(TypeInfoFactory.shortTypeInfo,oi.getTypeInfo());
ShortWritable res=(ShortWritable)udf.evaluate(args);
Assert.assertEquals(24,res.get());
}
InternalCallVerifier EqualityVerifier
/** varchar * int: both operands convert to double; "123" * 456 = 56088.0. */
@Test public void testVarcharTimesInt() throws HiveException {
GenericUDFOPMultiply udf=new GenericUDFOPMultiply();
HiveVarcharWritable left=new HiveVarcharWritable();
left.set("123");
IntWritable right=new IntWritable(456);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector,PrimitiveObjectInspectorFactory.writableIntObjectInspector};
DeferredObject[] args={new DeferredJavaObject(left),new DeferredJavaObject(right)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
// Expected value first, per the assertEquals(expected, actual) contract.
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo,oi.getTypeInfo());
DoubleWritable res=(DoubleWritable)udf.evaluate(args);
// Double.valueOf avoids the deprecated Double(double) constructor.
Assert.assertEquals(Double.valueOf(123 * 456),Double.valueOf(res.get()));
}
Class: org.apache.hadoop.hive.ql.udf.generic.TestGenericUDFOPNegative InternalCallVerifier EqualityVerifier
/** Unary minus on int keeps the int type and flips the sign: -(747) = -747. */
@Test public void testInt() throws HiveException {
GenericUDFOPNegative negate=new GenericUDFOPNegative();
DeferredObject operand=new DeferredJavaObject(new IntWritable(747));
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)negate.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableIntObjectInspector});
Assert.assertEquals(TypeInfoFactory.intTypeInfo,resultOI.getTypeInfo());
IntWritable negated=(IntWritable)negate.evaluate(new DeferredObject[]{operand});
Assert.assertEquals(-747,negated.get());
}
InternalCallVerifier EqualityVerifier
/** Unary minus on short keeps the short type and flips the sign: -(74) = -74. */
@Test public void testShort() throws HiveException {
GenericUDFOPNegative negate=new GenericUDFOPNegative();
DeferredObject operand=new DeferredJavaObject(new ShortWritable((short)74));
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)negate.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableShortObjectInspector});
Assert.assertEquals(TypeInfoFactory.shortTypeInfo,resultOI.getTypeInfo());
ShortWritable negated=(ShortWritable)negate.evaluate(new DeferredObject[]{operand});
Assert.assertEquals((short)-74,negated.get());
}
InternalCallVerifier EqualityVerifier
/** Unary minus on double keeps the double type and flips the sign. */
@Test public void testDouble() throws HiveException {
GenericUDFOPNegative udf=new GenericUDFOPNegative();
DoubleWritable input=new DoubleWritable(32300.004747);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableDoubleObjectInspector};
DeferredObject[] args={new DeferredJavaObject(input)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo,oi.getTypeInfo());
DoubleWritable res=(DoubleWritable)udf.evaluate(args);
// Double.valueOf avoids the deprecated Double(double) constructor.
Assert.assertEquals(Double.valueOf(-32300.004747),Double.valueOf(res.get()));
}
InternalCallVerifier EqualityVerifier
/** Unary minus on byte keeps the byte type and flips the sign: -(4) = -4. */
@Test public void testByte() throws HiveException {
GenericUDFOPNegative negate=new GenericUDFOPNegative();
DeferredObject operand=new DeferredJavaObject(new ByteWritable((byte)4));
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)negate.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableByteObjectInspector});
Assert.assertEquals(TypeInfoFactory.byteTypeInfo,resultOI.getTypeInfo());
ByteWritable negated=(ByteWritable)negate.evaluate(new DeferredObject[]{operand});
Assert.assertEquals((byte)-4,negated.get());
}
InternalCallVerifier EqualityVerifier
/** Unary minus on long keeps the long type and flips the sign. */
@Test public void testLong() throws HiveException {
GenericUDFOPNegative negate=new GenericUDFOPNegative();
DeferredObject operand=new DeferredJavaObject(new LongWritable(3234747));
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)negate.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableLongObjectInspector});
Assert.assertEquals(TypeInfoFactory.longTypeInfo,resultOI.getTypeInfo());
LongWritable negated=(LongWritable)negate.evaluate(new DeferredObject[]{operand});
Assert.assertEquals(-3234747L,negated.get());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/** Unary minus on char(12): the char value is parsed as a double, then negated. */
@Test public void testChar() throws HiveException {
GenericUDFOPNegative udf=new GenericUDFOPNegative();
HiveChar vc=new HiveChar("32300.004747",12);
HiveCharWritable input=new HiveCharWritable(vc);
CharTypeInfo inputTypeInfo=TypeInfoFactory.getCharTypeInfo(12);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo)};
DeferredObject[] args={new DeferredJavaObject(input)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo,oi.getTypeInfo());
DoubleWritable res=(DoubleWritable)udf.evaluate(args);
// Double.valueOf avoids the deprecated Double(double) constructor.
Assert.assertEquals(Double.valueOf(-32300.004747),Double.valueOf(res.get()));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/** Unary minus on varchar(12): the varchar value is parsed as a double, then negated. */
@Test public void testVarchar() throws HiveException {
GenericUDFOPNegative udf=new GenericUDFOPNegative();
HiveVarchar vc=new HiveVarchar("32300.004747",12);
HiveVarcharWritable input=new HiveVarcharWritable(vc);
VarcharTypeInfo inputTypeInfo=TypeInfoFactory.getVarcharTypeInfo(12);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo)};
DeferredObject[] args={new DeferredJavaObject(input)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo,oi.getTypeInfo());
DoubleWritable res=(DoubleWritable)udf.evaluate(args);
// Double.valueOf avoids the deprecated Double(double) constructor.
Assert.assertEquals(Double.valueOf(-32300.004747),Double.valueOf(res.get()));
}
InternalCallVerifier EqualityVerifier
/** Unary minus on float keeps the float type and flips the sign. */
@Test public void testFloat() throws HiveException {
GenericUDFOPNegative udf=new GenericUDFOPNegative();
FloatWritable input=new FloatWritable(323.4747f);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableFloatObjectInspector};
DeferredObject[] args={new DeferredJavaObject(input)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.floatTypeInfo,oi.getTypeInfo());
FloatWritable res=(FloatWritable)udf.evaluate(args);
// Float.valueOf avoids the deprecated Float(float) constructor.
Assert.assertEquals(Float.valueOf(-323.4747f),Float.valueOf(res.get()));
}
InternalCallVerifier EqualityVerifier
/** Unary minus on string: the string is parsed as a double, then negated. */
@Test public void testString() throws HiveException {
GenericUDFOPNegative udf=new GenericUDFOPNegative();
Text input=new Text("32300.004747");
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableStringObjectInspector};
DeferredObject[] args={new DeferredJavaObject(input)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo,oi.getTypeInfo());
DoubleWritable res=(DoubleWritable)udf.evaluate(args);
// Double.valueOf avoids the deprecated Double(double) constructor.
Assert.assertEquals(Double.valueOf(-32300.004747),Double.valueOf(res.get()));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/** Unary minus on decimal(11,6) preserves the input type info and negates the value. */
@Test public void testDecimal() throws HiveException {
GenericUDFOPNegative negate=new GenericUDFOPNegative();
DecimalTypeInfo decimalType=TypeInfoFactory.getDecimalTypeInfo(11,6);
DeferredObject operand=new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("32300.004747")));
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)negate.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(decimalType)});
Assert.assertEquals(decimalType,resultOI.getTypeInfo());
HiveDecimalWritable negated=(HiveDecimalWritable)negate.evaluate(new DeferredObject[]{operand});
Assert.assertEquals(HiveDecimal.create("-32300.004747"),negated.getHiveDecimal());
}
Class: org.apache.hadoop.hive.ql.udf.generic.TestGenericUDFOPPlus InternalCallVerifier EqualityVerifier
/** interval_year_month + date yields a date: 2-8 + 2001-06-15 = 2004-02-15. */
@Test public void testIntervalYearMonthPlusDate() throws Exception {
GenericUDFOPPlus plus=new GenericUDFOPPlus();
DeferredObject intervalArg=new DeferredJavaObject(new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8")));
DeferredObject dateArg=new DeferredJavaObject(new DateWritable(Date.valueOf("2001-06-15")));
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector,PrimitiveObjectInspectorFactory.writableDateObjectInspector};
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)plus.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.dateTypeInfo,resultOI.getTypeInfo());
DateWritable sum=(DateWritable)plus.evaluate(new DeferredObject[]{intervalArg,dateArg});
Assert.assertEquals(Date.valueOf("2004-02-15"),sum.get());
}
InternalCallVerifier EqualityVerifier
/** decimal(5,2) + decimal(5,2) must widen to decimal(6,2) for carry. */
@Test public void testDecimalPlusDecimalSameParams() throws HiveException {
GenericUDFOPPlus plus=new GenericUDFOPPlus();
ObjectInspector decimalOI=PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5,2));
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)plus.initialize(new ObjectInspector[]{decimalOI,decimalOI});
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(6,2),resultOI.getTypeInfo());
}
InternalCallVerifier EqualityVerifier
/** float + float keeps the float result type; 4.5f + 0.0f = 4.5f. */
@Test public void testFloatPlusFloat() throws HiveException {
GenericUDFOPPlus udf=new GenericUDFOPPlus();
FloatWritable f1=new FloatWritable(4.5f);
FloatWritable f2=new FloatWritable(0.0f);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableFloatObjectInspector,PrimitiveObjectInspectorFactory.writableFloatObjectInspector};
DeferredObject[] args={new DeferredJavaObject(f1),new DeferredJavaObject(f2)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
// Expected value first, per the assertEquals(expected, actual) contract.
Assert.assertEquals(TypeInfoFactory.floatTypeInfo,oi.getTypeInfo());
FloatWritable res=(FloatWritable)udf.evaluate(args);
// Float.valueOf avoids the deprecated Float(double) constructor.
Assert.assertEquals(Float.valueOf(4.5f),Float.valueOf(res.get()));
}
InternalCallVerifier EqualityVerifier
/** interval_year_month + interval_year_month: 1-2 + 1-11 = 3-1 (months carry into years). */
@Test public void testIntervalYearMonthPlusIntervalYearMonth() throws Exception {
GenericUDFOPPlus plus=new GenericUDFOPPlus();
DeferredObject leftArg=new DeferredJavaObject(new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("1-2")));
DeferredObject rightArg=new DeferredJavaObject(new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("1-11")));
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector,PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector};
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)plus.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.intervalYearMonthTypeInfo,resultOI.getTypeInfo());
HiveIntervalYearMonthWritable sum=(HiveIntervalYearMonthWritable)plus.evaluate(new DeferredObject[]{leftArg,rightArg});
Assert.assertEquals(HiveIntervalYearMonth.valueOf("3-1"),sum.getHiveIntervalYearMonth());
}
InternalCallVerifier EqualityVerifier
/** decimal(3,1) + decimal(5,2): result type decimal(6,2); 14.5 + 234.97 = 249.47. */
@Test public void testDecimalPlusDecimal() throws HiveException {
GenericUDFOPPlus plus=new GenericUDFOPPlus();
DeferredObject leftArg=new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("14.5")));
DeferredObject rightArg=new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("234.97")));
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(3,1)),PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5,2))};
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)plus.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(6,2),resultOI.getTypeInfo());
HiveDecimalWritable sum=(HiveDecimalWritable)plus.evaluate(new DeferredObject[]{leftArg,rightArg});
Assert.assertEquals(HiveDecimal.create("249.47"),sum.getHiveDecimal());
}
InternalCallVerifier EqualityVerifier
/** timestamp + interval_year_month yields a timestamp, preserving time-of-day and nanos. */
@Test public void testTimestampPlusIntervalYearMonth() throws Exception {
GenericUDFOPPlus plus=new GenericUDFOPPlus();
DeferredObject tsArg=new DeferredJavaObject(new TimestampWritable(Timestamp.valueOf("2001-11-15 01:02:03.123456789")));
DeferredObject intervalArg=new DeferredJavaObject(new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2")));
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector};
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)plus.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.timestampTypeInfo,resultOI.getTypeInfo());
TimestampWritable sum=(TimestampWritable)plus.evaluate(new DeferredObject[]{tsArg,intervalArg});
Assert.assertEquals(Timestamp.valueOf("2004-01-15 01:02:03.123456789"),sum.getTimestamp());
}
InternalCallVerifier EqualityVerifier
/** interval_day_time + timestamp yields a timestamp (operand order: interval first). */
@Test public void testIntervalDayTimePlusTimestamp() throws Exception {
GenericUDFOPPlus plus=new GenericUDFOPPlus();
DeferredObject intervalArg=new DeferredJavaObject(new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567")));
DeferredObject tsArg=new DeferredJavaObject(new TimestampWritable(Timestamp.valueOf("2001-01-01 00:00:00")));
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector,PrimitiveObjectInspectorFactory.writableTimestampObjectInspector};
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)plus.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.timestampTypeInfo,resultOI.getTypeInfo());
TimestampWritable sum=(TimestampWritable)plus.evaluate(new DeferredObject[]{intervalArg,tsArg});
Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"),sum.getTimestamp());
}
InternalCallVerifier EqualityVerifier
/** timestamp + interval_day_time yields a timestamp (operand order: timestamp first). */
@Test public void testTimestampPlusIntervalDayTime() throws Exception {
GenericUDFOPPlus plus=new GenericUDFOPPlus();
DeferredObject tsArg=new DeferredJavaObject(new TimestampWritable(Timestamp.valueOf("2001-01-01 00:00:00")));
DeferredObject intervalArg=new DeferredJavaObject(new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567")));
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector};
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)plus.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.timestampTypeInfo,resultOI.getTypeInfo());
TimestampWritable sum=(TimestampWritable)plus.evaluate(new DeferredObject[]{tsArg,intervalArg});
Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"),sum.getTimestamp());
}
InternalCallVerifier EqualityVerifier
/** interval_day_time + interval_day_time: 1 0:0:0.567 + 1 2:3:4 = 2 2:3:4.567. */
@Test public void testIntervalDayTimePlusIntervalDayTime() throws Exception {
GenericUDFOPPlus plus=new GenericUDFOPPlus();
DeferredObject leftArg=new DeferredJavaObject(new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 0:0:0.567")));
DeferredObject rightArg=new DeferredJavaObject(new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4")));
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector,PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector};
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)plus.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.intervalDayTimeTypeInfo,resultOI.getTypeInfo());
HiveIntervalDayTimeWritable sum=(HiveIntervalDayTimeWritable)plus.evaluate(new DeferredObject[]{leftArg,rightArg});
Assert.assertEquals(HiveIntervalDayTime.valueOf("2 2:3:4.567"),sum.getHiveIntervalDayTime());
}
InternalCallVerifier EqualityVerifier
/** interval_day_time + date promotes the result to timestamp to carry the time part. */
@Test public void testIntervalDayTimePlusDate() throws Exception {
GenericUDFOPPlus plus=new GenericUDFOPPlus();
DeferredObject intervalArg=new DeferredJavaObject(new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567")));
DeferredObject dateArg=new DeferredJavaObject(new DateWritable(Date.valueOf("2001-01-01")));
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector,PrimitiveObjectInspectorFactory.writableDateObjectInspector};
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)plus.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.timestampTypeInfo,resultOI.getTypeInfo());
TimestampWritable sum=(TimestampWritable)plus.evaluate(new DeferredObject[]{intervalArg,dateArg});
Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"),sum.getTimestamp());
}
InternalCallVerifier EqualityVerifier
/** varchar + int: both operands convert to double; "123" + 456 = 579.0. */
@Test public void testVarcharPlusInt() throws HiveException {
GenericUDFOPPlus udf=new GenericUDFOPPlus();
HiveVarcharWritable left=new HiveVarcharWritable();
left.set("123");
IntWritable right=new IntWritable(456);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector,PrimitiveObjectInspectorFactory.writableIntObjectInspector};
DeferredObject[] args={new DeferredJavaObject(left),new DeferredJavaObject(right)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
// Expected value first, per the assertEquals(expected, actual) contract.
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo,oi.getTypeInfo());
DoubleWritable res=(DoubleWritable)udf.evaluate(args);
// Double.valueOf avoids the deprecated Double(double) constructor.
Assert.assertEquals(Double.valueOf(579.0),Double.valueOf(res.get()));
}
InternalCallVerifier EqualityVerifier
/** date + interval_year_month yields a date: 2001-06-15 + 2-8 = 2004-02-15. */
@Test public void testDatePlusIntervalYearMonth() throws Exception {
GenericUDFOPPlus plus=new GenericUDFOPPlus();
DeferredObject dateArg=new DeferredJavaObject(new DateWritable(Date.valueOf("2001-06-15")));
DeferredObject intervalArg=new DeferredJavaObject(new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8")));
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableDateObjectInspector,PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector};
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)plus.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.dateTypeInfo,resultOI.getTypeInfo());
DateWritable sum=(DateWritable)plus.evaluate(new DeferredObject[]{dateArg,intervalArg});
Assert.assertEquals(Date.valueOf("2004-02-15"),sum.get());
}
InternalCallVerifier EqualityVerifier
/** date + interval_day_time promotes the result to timestamp to carry the time part. */
@Test public void testDatePlusIntervalDayTime() throws Exception {
GenericUDFOPPlus plus=new GenericUDFOPPlus();
DeferredObject dateArg=new DeferredJavaObject(new DateWritable(Date.valueOf("2001-01-01")));
DeferredObject intervalArg=new DeferredJavaObject(new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567")));
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableDateObjectInspector,PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector};
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)plus.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.timestampTypeInfo,resultOI.getTypeInfo());
TimestampWritable sum=(TimestampWritable)plus.evaluate(new DeferredObject[]{dateArg,intervalArg});
Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"),sum.getTimestamp());
}
InternalCallVerifier EqualityVerifier
/** double + long widens the long to double; 4.5 + 10 = 14.5. */
@Test public void testDoublePlusLong() throws HiveException {
GenericUDFOPPlus udf=new GenericUDFOPPlus();
DoubleWritable left=new DoubleWritable(4.5);
LongWritable right=new LongWritable(10);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,PrimitiveObjectInspectorFactory.writableLongObjectInspector};
DeferredObject[] args={new DeferredJavaObject(left),new DeferredJavaObject(right)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo,oi.getTypeInfo());
DoubleWritable res=(DoubleWritable)udf.evaluate(args);
// Double.valueOf avoids the deprecated Double(double) constructor.
Assert.assertEquals(Double.valueOf(14.5),Double.valueOf(res.get()));
}
InternalCallVerifier EqualityVerifier
/** interval_year_month + timestamp yields a timestamp, preserving time-of-day and nanos. */
@Test public void testIntervalYearMonthPlusTimestamp() throws Exception {
GenericUDFOPPlus plus=new GenericUDFOPPlus();
DeferredObject intervalArg=new DeferredJavaObject(new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2")));
DeferredObject tsArg=new DeferredJavaObject(new TimestampWritable(Timestamp.valueOf("2001-11-15 01:02:03.123456789")));
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector,PrimitiveObjectInspectorFactory.writableTimestampObjectInspector};
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)plus.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.timestampTypeInfo,resultOI.getTypeInfo());
TimestampWritable sum=(TimestampWritable)plus.evaluate(new DeferredObject[]{intervalArg,tsArg});
Assert.assertEquals(Timestamp.valueOf("2004-01-15 01:02:03.123456789"),sum.getTimestamp());
}
InternalCallVerifier EqualityVerifier
/** byte + short promotes to short; 4 + 6 = 10. */
@Test public void testBytePlusShort() throws HiveException {
GenericUDFOPPlus udf=new GenericUDFOPPlus();
ByteWritable left=new ByteWritable((byte)4);
ShortWritable right=new ShortWritable((short)6);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableByteObjectInspector,PrimitiveObjectInspectorFactory.writableShortObjectInspector};
DeferredObject[] args={new DeferredJavaObject(left),new DeferredJavaObject(right)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
// Expected value first, per the assertEquals(expected, actual) contract.
Assert.assertEquals(TypeInfoFactory.shortTypeInfo,oi.getTypeInfo());
ShortWritable res=(ShortWritable)udf.evaluate(args);
Assert.assertEquals(10,res.get());
}
InternalCallVerifier EqualityVerifier
/**
 * double + decimal(5,2): the decimal operand is widened to double, so the
 * result type is double and 74.52 + 234.97 = 309.49.
 * NOTE: method name has a typo ("Doule"); kept to avoid churn in test reports.
 */
@Test public void testDoulePlusDecimal() throws HiveException {
GenericUDFOPPlus udf=new GenericUDFOPPlus();
DoubleWritable left=new DoubleWritable(74.52);
HiveDecimalWritable right=new HiveDecimalWritable(HiveDecimal.create("234.97"));
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5,2))};
DeferredObject[] args={new DeferredJavaObject(left),new DeferredJavaObject(right)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo,oi.getTypeInfo());
DoubleWritable res=(DoubleWritable)udf.evaluate(args);
// Double.valueOf avoids the deprecated Double(double) constructor.
Assert.assertEquals(Double.valueOf(309.49),Double.valueOf(res.get()));
}
InternalCallVerifier EqualityVerifier
/** long + decimal(9,4): result type is decimal(24,4) and 104 + 234.97 = 338.97. */
@Test public void testLongPlusDecimal() throws HiveException {
GenericUDFOPPlus plus=new GenericUDFOPPlus();
DeferredObject leftArg=new DeferredJavaObject(new LongWritable(104));
DeferredObject rightArg=new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("234.97")));
ObjectInspector[] argOIs={PrimitiveObjectInspectorFactory.writableLongObjectInspector,PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(9,4))};
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)plus.initialize(argOIs);
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(24,4),resultOI.getTypeInfo());
HiveDecimalWritable sum=(HiveDecimalWritable)plus.evaluate(new DeferredObject[]{leftArg,rightArg});
Assert.assertEquals(HiveDecimal.create("338.97"),sum.getHiveDecimal());
}
Class: org.apache.hadoop.hive.ql.udf.generic.TestGenericUDFOPPositive InternalCallVerifier EqualityVerifier
/** Unary plus on float keeps the float type and returns the value unchanged. */
@Test public void testFloat() throws HiveException {
GenericUDFOPPositive udf=new GenericUDFOPPositive();
FloatWritable input=new FloatWritable(323.4747f);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableFloatObjectInspector};
DeferredObject[] args={new DeferredJavaObject(input)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.floatTypeInfo,oi.getTypeInfo());
FloatWritable res=(FloatWritable)udf.evaluate(args);
// Float.valueOf avoids the deprecated Float(float) constructor.
Assert.assertEquals(Float.valueOf(323.4747f),Float.valueOf(res.get()));
}
InternalCallVerifier EqualityVerifier
/** Unary plus on long keeps the long type and returns the value unchanged. */
@Test public void testLong() throws HiveException {
GenericUDFOPPositive positive=new GenericUDFOPPositive();
DeferredObject operand=new DeferredJavaObject(new LongWritable(3234747));
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)positive.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableLongObjectInspector});
Assert.assertEquals(TypeInfoFactory.longTypeInfo,resultOI.getTypeInfo());
LongWritable result=(LongWritable)positive.evaluate(new DeferredObject[]{operand});
Assert.assertEquals(3234747L,result.get());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/** Unary plus on char(12): the char value is parsed as a double and returned unchanged. */
@Test public void testChar() throws HiveException {
GenericUDFOPPositive udf=new GenericUDFOPPositive();
HiveChar vc=new HiveChar("32300.004747",12);
HiveCharWritable input=new HiveCharWritable(vc);
CharTypeInfo inputTypeInfo=TypeInfoFactory.getCharTypeInfo(12);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo)};
DeferredObject[] args={new DeferredJavaObject(input)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo,oi.getTypeInfo());
DoubleWritable res=(DoubleWritable)udf.evaluate(args);
// Double.valueOf avoids the deprecated Double(double) constructor.
Assert.assertEquals(Double.valueOf(32300.004747),Double.valueOf(res.get()));
}
InternalCallVerifier EqualityVerifier
/** Unary plus on double keeps the double type and returns the value unchanged. */
@Test public void testDouble() throws HiveException {
GenericUDFOPPositive udf=new GenericUDFOPPositive();
DoubleWritable input=new DoubleWritable(32300.004747);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.writableDoubleObjectInspector};
DeferredObject[] args={new DeferredJavaObject(input)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo,oi.getTypeInfo());
DoubleWritable res=(DoubleWritable)udf.evaluate(args);
// Double.valueOf avoids the deprecated Double(double) constructor.
Assert.assertEquals(Double.valueOf(32300.004747),Double.valueOf(res.get()));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/** Unary plus on decimal(11,6) preserves the input type info and the value. */
@Test public void testDecimal() throws HiveException {
GenericUDFOPPositive positive=new GenericUDFOPPositive();
DecimalTypeInfo decimalType=TypeInfoFactory.getDecimalTypeInfo(11,6);
DeferredObject operand=new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("32300.004747")));
PrimitiveObjectInspector resultOI=(PrimitiveObjectInspector)positive.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(decimalType)});
Assert.assertEquals(decimalType,resultOI.getTypeInfo());
HiveDecimalWritable result=(HiveDecimalWritable)positive.evaluate(new DeferredObject[]{operand});
Assert.assertEquals(HiveDecimal.create("32300.004747"),result.getHiveDecimal());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/** Unary plus on varchar(12): the varchar value is parsed as a double and returned unchanged. */
@Test public void testVarchar() throws HiveException {
GenericUDFOPPositive udf=new GenericUDFOPPositive();
HiveVarchar vc=new HiveVarchar("32300.004747",12);
HiveVarcharWritable input=new HiveVarcharWritable(vc);
VarcharTypeInfo inputTypeInfo=TypeInfoFactory.getVarcharTypeInfo(12);
ObjectInspector[] inputOIs={PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo)};
DeferredObject[] args={new DeferredJavaObject(input)};
PrimitiveObjectInspector oi=(PrimitiveObjectInspector)udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo,oi.getTypeInfo());
DoubleWritable res=(DoubleWritable)udf.evaluate(args);
// Double.valueOf avoids the deprecated Double(double) constructor.
Assert.assertEquals(Double.valueOf(32300.004747),Double.valueOf(res.get()));
}
InternalCallVerifier EqualityVerifier
/** +(int) is the identity: the type stays int and the value is unchanged. */
@Test public void testInt() throws HiveException {
  GenericUDFOPPositive udf = new GenericUDFOPPositive();
  ObjectInspector[] argOIs = {PrimitiveObjectInspectorFactory.writableIntObjectInspector};
  PrimitiveObjectInspector outputOI = (PrimitiveObjectInspector) udf.initialize(argOIs);
  Assert.assertEquals(TypeInfoFactory.intTypeInfo, outputOI.getTypeInfo());
  DeferredObject[] evalArgs = {new DeferredJavaObject(new IntWritable(747))};
  IntWritable result = (IntWritable) udf.evaluate(evalArgs);
  Assert.assertEquals(747, result.get());
}
InternalCallVerifier EqualityVerifier
/** +(tinyint) is the identity: the type stays byte and the value is unchanged. */
@Test public void testByte() throws HiveException {
  GenericUDFOPPositive udf = new GenericUDFOPPositive();
  ObjectInspector[] argOIs = {PrimitiveObjectInspectorFactory.writableByteObjectInspector};
  PrimitiveObjectInspector outputOI = (PrimitiveObjectInspector) udf.initialize(argOIs);
  Assert.assertEquals(TypeInfoFactory.byteTypeInfo, outputOI.getTypeInfo());
  DeferredObject[] evalArgs = {new DeferredJavaObject(new ByteWritable((byte) 4))};
  ByteWritable result = (ByteWritable) udf.evaluate(evalArgs);
  Assert.assertEquals((byte) 4, result.get());
}
InternalCallVerifier EqualityVerifier
/** +(string): a numeric string is coerced to double and returned unchanged. */
@Test public void testString() throws HiveException {
  GenericUDFOPPositive udf = new GenericUDFOPPositive();
  Text input = new Text("32300.004747");
  ObjectInspector[] inputOIs = {PrimitiveObjectInspectorFactory.writableStringObjectInspector};
  DeferredObject[] args = {new DeferredJavaObject(input)};
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  // String input is promoted to double by unary plus.
  Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
  DoubleWritable res = (DoubleWritable) udf.evaluate(args);
  // Double.valueOf replaces the deprecated Double(double) constructor.
  Assert.assertEquals(Double.valueOf(32300.004747), Double.valueOf(res.get()));
}
InternalCallVerifier EqualityVerifier
/** +(smallint) is the identity: the type stays short and the value is unchanged. */
@Test public void testShort() throws HiveException {
  GenericUDFOPPositive udf = new GenericUDFOPPositive();
  ObjectInspector[] argOIs = {PrimitiveObjectInspectorFactory.writableShortObjectInspector};
  PrimitiveObjectInspector outputOI = (PrimitiveObjectInspector) udf.initialize(argOIs);
  Assert.assertEquals(TypeInfoFactory.shortTypeInfo, outputOI.getTypeInfo());
  DeferredObject[] evalArgs = {new DeferredJavaObject(new ShortWritable((short) 74))};
  ShortWritable result = (ShortWritable) udf.evaluate(evalArgs);
  Assert.assertEquals((short) 74, result.get());
}
Class: org.apache.hadoop.hive.ql.udf.generic.TestGenericUDFPosMod InternalCallVerifier EqualityVerifier
/** pmod(decimal(3,1), decimal(5,2)): result type resolves to decimal(5,2). */
@Test public void testDecimalPosModDecimal() throws HiveException {
  GenericUDFPosMod udf = new GenericUDFPosMod();
  ObjectInspector leftOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(3, 1));
  ObjectInspector rightOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5, 2));
  PrimitiveObjectInspector outputOI = (PrimitiveObjectInspector) udf.initialize(new ObjectInspector[]{leftOI, rightOI});
  Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(5, 2), outputOI.getTypeInfo());
}
InternalCallVerifier EqualityVerifier
/** pmod over two identical decimal(5,2) operands keeps the decimal(5,2) result type. */
@Test public void testDecimalPosModDecimalSameParams() throws HiveException {
  GenericUDFPosMod udf = new GenericUDFPosMod();
  ObjectInspector decimalOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5, 2));
  PrimitiveObjectInspector outputOI = (PrimitiveObjectInspector) udf.initialize(new ObjectInspector[]{decimalOI, decimalOI});
  Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(5, 2), outputOI.getTypeInfo());
}
Class: org.apache.hadoop.hive.ql.udf.generic.TestGenericUDFPower InternalCallVerifier EqualityVerifier
/** power(bigint, decimal): 10 ^ 3.14 is evaluated in double arithmetic. */
@Test public void testLongPowerDecimal() throws HiveException {
  GenericUDFPower udf = new GenericUDFPower();
  LongWritable left = new LongWritable(10);
  HiveDecimalWritable right = new HiveDecimalWritable(HiveDecimal.create("3.14"));
  ObjectInspector[] inputOIs = {PrimitiveObjectInspectorFactory.writableLongObjectInspector, PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(9, 4))};
  DeferredObject[] args = {new DeferredJavaObject(left), new DeferredJavaObject(right)};
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
  DoubleWritable res = (DoubleWritable) udf.evaluate(args);
  // Double.valueOf replaces the deprecated Double(double) constructor.
  Assert.assertEquals(Double.valueOf(1380.3842646028852), Double.valueOf(res.get()));
}
InternalCallVerifier EqualityVerifier
/** power(varchar, int): "3.14" ^ 2; the varchar operand is coerced to double. */
@Test public void testVarcharPowerInt() throws HiveException {
  GenericUDFPower udf = new GenericUDFPower();
  HiveVarcharWritable left = new HiveVarcharWritable();
  left.set("3.14");
  IntWritable right = new IntWritable(2);
  ObjectInspector[] inputOIs = {PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector, PrimitiveObjectInspectorFactory.writableIntObjectInspector};
  DeferredObject[] args = {new DeferredJavaObject(left), new DeferredJavaObject(right)};
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  // Expected value first, actual second (JUnit convention; the original had them reversed).
  Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
  DoubleWritable res = (DoubleWritable) udf.evaluate(args);
  // Double.valueOf replaces the deprecated Double(double) constructor.
  Assert.assertEquals(Double.valueOf(3.14 * 3.14), Double.valueOf(res.get()));
}
InternalCallVerifier EqualityVerifier
/** power(tinyint, smallint): 2 ^ 4 = 16, evaluated as double. */
@Test public void testBytePowerShort() throws HiveException {
  GenericUDFPower udf = new GenericUDFPower();
  ByteWritable left = new ByteWritable((byte) 2);
  ShortWritable right = new ShortWritable((short) 4);
  ObjectInspector[] inputOIs = {PrimitiveObjectInspectorFactory.writableByteObjectInspector, PrimitiveObjectInspectorFactory.writableShortObjectInspector};
  DeferredObject[] args = {new DeferredJavaObject(left), new DeferredJavaObject(right)};
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
  DoubleWritable res = (DoubleWritable) udf.evaluate(args);
  // Double.valueOf replaces the deprecated Double(double) constructor.
  Assert.assertEquals(Double.valueOf(16), Double.valueOf(res.get()));
}
InternalCallVerifier EqualityVerifier
/** power(decimal, decimal) with a negative fractional exponent: 14.5 ^ -3.2 as double. */
@Test public void testDecimalPowerDecimal() throws HiveException {
  GenericUDFPower udf = new GenericUDFPower();
  HiveDecimalWritable left = new HiveDecimalWritable(HiveDecimal.create("14.5"));
  HiveDecimalWritable right = new HiveDecimalWritable(HiveDecimal.create("-3.2"));
  ObjectInspector[] inputOIs = {PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(3, 1)), PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5, 2))};
  DeferredObject[] args = {new DeferredJavaObject(left), new DeferredJavaObject(right)};
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
  DoubleWritable res = (DoubleWritable) udf.evaluate(args);
  // Double.valueOf replaces the deprecated Double(double) constructor.
  Assert.assertEquals(Double.valueOf(1.9214203800477838E-4), Double.valueOf(res.get()));
}
InternalCallVerifier EqualityVerifier
/**
 * power(double, decimal): (-4.52) ^ 3, compared with an epsilon.
 * Note: the method name carries a historical typo ("Doule"); kept unchanged to
 * avoid churn in test reports. The assertion now uses the primitive-double
 * assertEquals overload directly instead of boxing through the deprecated
 * Double(double) constructor (the original unboxed to the same overload anyway).
 */
@Test public void testDoulePowerDecimal() throws HiveException {
  GenericUDFPower udf = new GenericUDFPower();
  DoubleWritable left = new DoubleWritable(-4.52);
  HiveDecimalWritable right = new HiveDecimalWritable(HiveDecimal.create("3"));
  ObjectInspector[] inputOIs = {PrimitiveObjectInspectorFactory.writableDoubleObjectInspector, PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5, 2))};
  DeferredObject[] args = {new DeferredJavaObject(left), new DeferredJavaObject(right)};
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
  DoubleWritable res = (DoubleWritable) udf.evaluate(args);
  Assert.assertEquals("Unexpected result", -4.52 * 4.52 * 4.52, res.get(), 1e-6);
}
InternalCallVerifier EqualityVerifier
/** power(smallint, float) with a negative fractional exponent: 23 ^ -1.5 as double. */
@Test public void testShortPowerFloat() throws HiveException {
  GenericUDFPower udf = new GenericUDFPower();
  ShortWritable base = new ShortWritable((short) 23);
  FloatWritable power = new FloatWritable(-1.5f);
  ObjectInspector[] inputOIs = {PrimitiveObjectInspectorFactory.writableShortObjectInspector, PrimitiveObjectInspectorFactory.writableFloatObjectInspector};
  DeferredObject[] args = {new DeferredJavaObject(base), new DeferredJavaObject(power)};
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  // Expected value first, actual second (JUnit convention; the original had them reversed).
  Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
  DoubleWritable res = (DoubleWritable) udf.evaluate(args);
  // Double.valueOf replaces the deprecated Double(double) constructor.
  Assert.assertEquals(Double.valueOf(0.009065844089438033), Double.valueOf(res.get()));
}
InternalCallVerifier EqualityVerifier
/** power(double, bigint): 4.5 ^ 4, evaluated as double. */
@Test public void testDoublePowerLong() throws HiveException {
  GenericUDFPower udf = new GenericUDFPower();
  DoubleWritable left = new DoubleWritable(4.5);
  LongWritable right = new LongWritable(4);
  ObjectInspector[] inputOIs = {PrimitiveObjectInspectorFactory.writableDoubleObjectInspector, PrimitiveObjectInspectorFactory.writableLongObjectInspector};
  DeferredObject[] args = {new DeferredJavaObject(left), new DeferredJavaObject(right)};
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
  DoubleWritable res = (DoubleWritable) udf.evaluate(args);
  // Double.valueOf replaces the deprecated Double(double) constructor.
  Assert.assertEquals(Double.valueOf(4.5 * 4.5 * 4.5 * 4.5), Double.valueOf(res.get()));
}
InternalCallVerifier EqualityVerifier
/** power(float, float): 4.5 ^ -1.5, evaluated as double. */
@Test public void testFloatPowerFloat() throws HiveException {
  GenericUDFPower udf = new GenericUDFPower();
  FloatWritable f1 = new FloatWritable(4.5f);
  FloatWritable f2 = new FloatWritable(-1.5f);
  ObjectInspector[] inputOIs = {PrimitiveObjectInspectorFactory.writableFloatObjectInspector, PrimitiveObjectInspectorFactory.writableFloatObjectInspector};
  DeferredObject[] args = {new DeferredJavaObject(f1), new DeferredJavaObject(f2)};
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  // Expected value first, actual second (JUnit convention; the original had them reversed).
  Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
  DoubleWritable res = (DoubleWritable) udf.evaluate(args);
  // Double.valueOf replaces the deprecated Double(double) constructor.
  Assert.assertEquals(Double.valueOf(0.10475656017578482), Double.valueOf(res.get()));
}
Class: org.apache.hadoop.hive.ql.udf.generic.TestGenericUDFPrintf InternalCallVerifier EqualityVerifier
/** printf substitutes char and varchar arguments into a string format. */
@Test public void testCharVarcharArgs() throws HiveException {
  GenericUDFPrintf udf = new GenericUDFPrintf();
  HiveCharWritable charArg = new HiveCharWritable();
  charArg.set("hello");
  HiveVarcharWritable varcharArg = new HiveVarcharWritable();
  varcharArg.set("world");
  ObjectInspector[] argOIs = {
      PrimitiveObjectInspectorFactory.writableStringObjectInspector,
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getCharTypeInfo(5)),
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getVarcharTypeInfo(7))};
  PrimitiveObjectInspector outputOI = (PrimitiveObjectInspector) udf.initialize(argOIs);
  // printf always produces a string result.
  Assert.assertEquals(PrimitiveObjectInspectorFactory.writableStringObjectInspector, outputOI);
  DeferredObject[] evalArgs = {
      new DeferredJavaObject(new Text("1st: %s, 2nd: %s")),
      new DeferredJavaObject(charArg),
      new DeferredJavaObject(varcharArg)};
  Text result = (Text) udf.evaluate(evalArgs);
  Assert.assertEquals("1st: hello, 2nd: world", result.toString());
}
InternalCallVerifier EqualityVerifier
/** printf renders decimal arguments using their declared scale (234.789 at scale 2 -> 234.79). */
@Test public void testDecimalArgs() throws HiveException {
  GenericUDFPrintf udf = new GenericUDFPrintf();
  HiveDecimalWritable firstDec = new HiveDecimalWritable();
  firstDec.set(HiveDecimal.create("234.789"));
  HiveDecimalWritable secondDec = new HiveDecimalWritable();
  secondDec.set(HiveDecimal.create("3.5"));
  ObjectInspector[] argOIs = {
      PrimitiveObjectInspectorFactory.writableStringObjectInspector,
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5, 2)),
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(3, 2))};
  PrimitiveObjectInspector outputOI = (PrimitiveObjectInspector) udf.initialize(argOIs);
  Assert.assertEquals(PrimitiveObjectInspectorFactory.writableStringObjectInspector, outputOI);
  DeferredObject[] evalArgs = {
      new DeferredJavaObject(new Text("1st: %s, 2nd: %s")),
      new DeferredJavaObject(firstDec),
      new DeferredJavaObject(secondDec)};
  Text result = (Text) udf.evaluate(evalArgs);
  Assert.assertEquals("1st: 234.79, 2nd: 3.5", result.toString());
}
InternalCallVerifier EqualityVerifier
/** printf accepts a char-typed format string. */
@Test public void testCharFormat() throws HiveException {
  GenericUDFPrintf udf = new GenericUDFPrintf();
  HiveCharWritable format = new HiveCharWritable();
  format.set("arg1=%s");
  HiveVarcharWritable argument = new HiveVarcharWritable();
  argument.set("world");
  ObjectInspector[] argOIs = {
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getCharTypeInfo(10)),
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getVarcharTypeInfo(7))};
  PrimitiveObjectInspector outputOI = (PrimitiveObjectInspector) udf.initialize(argOIs);
  Assert.assertEquals(PrimitiveObjectInspectorFactory.writableStringObjectInspector, outputOI);
  DeferredObject[] evalArgs = {new DeferredJavaObject(format), new DeferredJavaObject(argument)};
  Text result = (Text) udf.evaluate(evalArgs);
  Assert.assertEquals("arg1=world", result.toString());
}
InternalCallVerifier EqualityVerifier
/** printf accepts a varchar-typed format string. */
@Test public void testVarcharFormat() throws HiveException {
  GenericUDFPrintf udf = new GenericUDFPrintf();
  HiveVarcharWritable format = new HiveVarcharWritable();
  format.set("arg1=%s");
  HiveCharWritable argument = new HiveCharWritable();
  argument.set("hello");
  ObjectInspector[] argOIs = {
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getVarcharTypeInfo(7)),
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getCharTypeInfo(5))};
  PrimitiveObjectInspector outputOI = (PrimitiveObjectInspector) udf.initialize(argOIs);
  Assert.assertEquals(PrimitiveObjectInspectorFactory.writableStringObjectInspector, outputOI);
  DeferredObject[] evalArgs = {new DeferredJavaObject(format), new DeferredJavaObject(argument)};
  Text result = (Text) udf.evaluate(evalArgs);
  Assert.assertEquals("arg1=hello", result.toString());
}
Class: org.apache.hadoop.hive.ql.udf.generic.TestGenericUDFRound InternalCallVerifier EqualityVerifier
/** round(decimal(7,3), 5): rounding past the scale widens the type to decimal(9,5). */
@Test public void testDecimalRoundingMetaData2() throws UDFArgumentException {
  GenericUDFRound udf = new GenericUDFRound();
  ObjectInspector valueOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(7, 3));
  ObjectInspector scaleOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.intTypeInfo, new IntWritable(5));
  PrimitiveObjectInspector outputOI = (PrimitiveObjectInspector) udf.initialize(new ObjectInspector[]{valueOI, scaleOI});
  Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(9, 5), (DecimalTypeInfo) outputOI.getTypeInfo());
}
InternalCallVerifier EqualityVerifier
/** round(decimal(7,3), -2): a negative scale yields decimal(5,0). */
@Test public void testDecimalRoundingMetaData1() throws UDFArgumentException {
  GenericUDFRound udf = new GenericUDFRound();
  ObjectInspector valueOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(7, 3));
  ObjectInspector scaleOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.intTypeInfo, new IntWritable(-2));
  PrimitiveObjectInspector outputOI = (PrimitiveObjectInspector) udf.initialize(new ObjectInspector[]{valueOI, scaleOI});
  Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(5, 0), (DecimalTypeInfo) outputOI.getTypeInfo());
}
InternalCallVerifier EqualityVerifier
/** round(decimal(7,3), 2): rounding within the scale yields decimal(7,2). */
@Test public void testDecimalRoundingMetaData() throws UDFArgumentException {
  GenericUDFRound udf = new GenericUDFRound();
  ObjectInspector valueOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(7, 3));
  ObjectInspector scaleOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.intTypeInfo, new IntWritable(2));
  PrimitiveObjectInspector outputOI = (PrimitiveObjectInspector) udf.initialize(new ObjectInspector[]{valueOI, scaleOI});
  Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(7, 2), (DecimalTypeInfo) outputOI.getTypeInfo());
}
Class: org.apache.hadoop.hive.ql.udf.generic.TestGenericUDFUtils EqualityVerifier
/**
 * GenericUDFUtils.findText: locate a substring within a Text value starting at a
 * given offset; returns the match position or -1. Covers hits, misses,
 * empty-subject/empty-needle boundaries, multi-byte (Cyrillic) text and a
 * supplementary character expressed as a surrogate pair.
 * An exact duplicate of the start=5 assertion was removed.
 */
@Test public void testFindText() throws Exception {
  Assert.assertEquals(0, GenericUDFUtils.findText(new Text("foobarbar"), new Text("foo"), 0));
  Assert.assertEquals(3, GenericUDFUtils.findText(new Text("foobarbar"), new Text("bar"), 0));
  Assert.assertEquals(-1, GenericUDFUtils.findText(new Text("foobarbar"), new Text("xbar"), 0));
  // Starts at or before the second "bar" find it; later starts do not.
  Assert.assertEquals(6, GenericUDFUtils.findText(new Text("foobarbar"), new Text("bar"), 5));
  Assert.assertEquals(6, GenericUDFUtils.findText(new Text("foobarbar"), new Text("bar"), 6));
  Assert.assertEquals(-1, GenericUDFUtils.findText(new Text("foobarbar"), new Text("bar"), 7));
  Assert.assertEquals(-1, GenericUDFUtils.findText(new Text("foobarbar"), new Text("bar"), 10));
  // Empty subject / empty needle boundary cases: an empty needle matches at any
  // in-range start, including one past the last character.
  Assert.assertEquals(-1, GenericUDFUtils.findText(new Text(""), new Text("bar"), 0));
  Assert.assertEquals(0, GenericUDFUtils.findText(new Text(""), new Text(""), 0));
  Assert.assertEquals(0, GenericUDFUtils.findText(new Text("foobar"), new Text(""), 0));
  Assert.assertEquals(0, GenericUDFUtils.findText(new Text("foobar"), new Text(""), 6));
  Assert.assertEquals(-1, GenericUDFUtils.findText(new Text("foobar"), new Text(""), 7));
  // Multi-byte and surrogate-pair inputs: positions here are character-based.
  Assert.assertEquals(4, GenericUDFUtils.findText(new Text("НАСТРОЕние"), new Text("Р"), 0));
  Assert.assertEquals(15, GenericUDFUtils.findText(new Text("НАСТРОЕние НАСТРОЕние"), new Text("Р"), 11));
  Assert.assertEquals(3, GenericUDFUtils.findText(new Text("123\uD801\uDC00456"), new Text("\uD801\uDC00"), 0));
  Assert.assertEquals(4, GenericUDFUtils.findText(new Text("123\uD801\uDC00456"), new Text("4"), 0));
}
Class: org.apache.hadoop.hive.ql.udf.xml.TestReusableStringReader APIUtilityVerifier EqualityVerifier
/** ReusableStringReader.skip: clamping at EOF, mid-stream skips, and skip past end. */
@Test public void testSkip() throws IOException {
  ReusableStringReader reader = new ReusableStringReader();
  reader.set(fox);
  // Skipping more than the remaining content clamps to the content length.
  assertEquals(fox.length(), reader.skip(fox.length() + 1));
  assertEquals(-1, reader.read());
  // Reload the reader and interleave reads with skips.
  reader.set(fox);
  char[] buf = new char[6];
  int count = reader.read(buf);
  assertEquals(6, count);
  assertEquals("Quick ", new String(buf));
  assertEquals(30, reader.skip(30));
  count = reader.read(buf);
  assertEquals(4, count);
  assertEquals("dog.", new String(buf, 0, count));
  // At EOF skip reports 0 and read reports -1.
  assertEquals(0, reader.skip(300));
  assertEquals(-1, reader.read());
  reader.close();
}
APIUtilityVerifier BranchVerifier BooleanVerifier EqualityVerifier HybridVerifier
/** mark/reset replays previously-read content; guarded so unsupported readers are skipped. */
@Test public void testMarkReset() throws IOException {
  ReusableStringReader reader = new ReusableStringReader();
  if (reader.markSupported()) {
    reader.set(fox);
    assertTrue(reader.ready());
    char[] buf = new char[6];
    int count = reader.read(buf);
    assertEquals(6, count);
    assertEquals("Quick ", new String(buf));
    // Mark after the first word, read the second word, then reset and re-read it.
    reader.mark(100);
    count = reader.read(buf);
    assertEquals(6, count);
    assertEquals("brown ", new String(buf));
    reader.reset();
    count = reader.read(buf);
    assertEquals(6, count);
    assertEquals("brown ", new String(buf));
  }
  reader.close();
}
Class: org.apache.hadoop.hive.ql.udf.xml.TestUDFXPathUtil BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Positive-path checks for UDFXPathUtil over a small XML document.
 * NOTE(review): the XML string literals in this file appear to have had their
 * markup stripped by an extraction step (only element text survived). They are
 * reconstructed here from the asserted results — e.g. the "foo" value requires
 * a k attribute on the second &lt;b&gt; element, and "a/(asterisk)" must select
 * 5 child elements. Verify against the upstream TestUDFXPathUtil source.
 */
@Test public void testEvalPositive() {
  UDFXPathUtil util = new UDFXPathUtil();
  Object result = util.eval("<a><b>b1</b><b>b2</b><b>b3</b><c>c1</c><c>c2</c></a>", "a/c[2]/text()", XPathConstants.STRING);
  assertEquals("c2", result);
  result = util.evalBoolean("<a><b>true</b><b>false</b><b>b3</b><c>c1</c><c>c2</c></a>", "a/b[1]/text()");
  assertEquals(Boolean.TRUE, result);
  // A node test on a non-existent element is false.
  result = util.evalBoolean("<a><b>true</b><b>false</b><b>b3</b><c>c1</c><c>c2</c></a>", "a/b[4]");
  assertEquals(Boolean.FALSE, result);
  result = util.evalString("<a><b>true</b><b>false</b><b>b3</b><c>c1</c><c>c2</c></a>", "a/b[3]/text()");
  assertEquals("b3", result);
  // Missing text node stringifies to the empty string.
  result = util.evalString("<a><b>true</b><b>false</b><b>b3</b><c>c1</c><c>c2</c></a>", "a/b[4]/text()");
  assertEquals("", result);
  // Attribute lookup on the second <b> element.
  result = util.evalString("<a><b>true</b><b k=\"foo\">FALSE</b><b>b3</b><c>c1</c><c>c2</c></a>", "a/b[2]/@k");
  assertEquals("foo", result);
  result = util.evalNumber("<a><b>true</b><b>false</b><b>b3</b><c>c1</c><c>-77</c></a>", "a/c[2]");
  assertEquals(-77.0d, result);
  // A non-numeric attribute value evaluates to NaN under NUMBER.
  result = util.evalNumber("<a><b>true</b><b k=\"foo\">FALSE</b><b>b3</b><c>c1</c><c>c2</c></a>", "a/b[2]/@k");
  assertEquals(Double.NaN, result);
  result = util.evalNode("<a><b>true</b><b>false</b><b>b3</b><c>c1</c><c>-77</c></a>", "a/c[2]");
  assertNotNull(result);
  assertTrue(result instanceof Node);
  // All five children of <a> are selected.
  result = util.evalNodeList("<a><b>true</b><b>false</b><b>b3</b><c>c1</c><c>-77</c></a>", "a/*");
  assertNotNull(result);
  assertTrue(result instanceof NodeList);
  assertEquals(5, ((NodeList) result).getLength());
}
Class: org.apache.hadoop.hive.serde2.TestColumnProjectionUtils APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * appendReadColumns accumulates column ids in the conf; an empty append keeps the
 * id list empty but still marks the conf as not read-all.
 * Raw List/ArrayList types (apparently stripped generics) restored to Integer.
 */
@Test public void testReadColumnIds() {
  List<Integer> columnIds = new ArrayList<Integer>();
  List<Integer> actual;
  assertEquals(Collections.EMPTY_LIST, ColumnProjectionUtils.getReadColumnIDs(conf));
  actual = ColumnProjectionUtils.getReadColumnIDs(conf);
  assertEquals(Collections.EMPTY_LIST, actual);
  // Appending an empty list adds no ids but clears the read-all flag.
  ColumnProjectionUtils.appendReadColumns(conf, columnIds);
  actual = ColumnProjectionUtils.getReadColumnIDs(conf);
  assertEquals(Collections.EMPTY_LIST, actual);
  assertFalse(ColumnProjectionUtils.isReadAllColumns(conf));
  columnIds.add(1);
  ColumnProjectionUtils.appendReadColumns(conf, Collections.singletonList(1));
  assertEquals(columnIds, ColumnProjectionUtils.getReadColumnIDs(conf));
  columnIds.add(2);
  ColumnProjectionUtils.appendReadColumns(conf, Collections.singletonList(2));
  // Sort before comparing: append order in the conf is not guaranteed here.
  actual = ColumnProjectionUtils.getReadColumnIDs(conf);
  Collections.sort(actual);
  assertEquals(columnIds, actual);
  columnIds.add(3);
  ColumnProjectionUtils.appendReadColumns(conf, Collections.singletonList(3));
  actual = ColumnProjectionUtils.getReadColumnIDs(conf);
  Collections.sort(actual);
  assertEquals(columnIds, actual);
  assertFalse(ColumnProjectionUtils.isReadAllColumns(conf));
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Exercises the deprecated setReadColumnIDs/appendReadColumnIDs/setFullyReadColumns
 * API for backward compatibility.
 * Raw List/ArrayList types (apparently stripped generics) restored to Integer.
 */
@Test public void testDeprecatedMethods() {
  List<Integer> columnIds = new ArrayList<Integer>();
  List<Integer> actual;
  assertEquals(Collections.EMPTY_LIST, ColumnProjectionUtils.getReadColumnIDs(conf));
  actual = ColumnProjectionUtils.getReadColumnIDs(conf);
  assertEquals(Collections.EMPTY_LIST, actual);
  ColumnProjectionUtils.setReadColumnIDs(conf, columnIds);
  actual = ColumnProjectionUtils.getReadColumnIDs(conf);
  assertEquals(Collections.EMPTY_LIST, actual);
  columnIds.add(1);
  ColumnProjectionUtils.setReadColumnIDs(conf, Collections.singletonList(1));
  actual = ColumnProjectionUtils.getReadColumnIDs(conf);
  Collections.sort(actual);
  assertEquals(columnIds, actual);
  columnIds.add(2);
  ColumnProjectionUtils.appendReadColumnIDs(conf, Collections.singletonList(2));
  actual = ColumnProjectionUtils.getReadColumnIDs(conf);
  Collections.sort(actual);
  assertEquals(columnIds, actual);
  assertFalse(ColumnProjectionUtils.isReadAllColumns(conf));
  // The deprecated full-read setter must flip the read-all flag on.
  ColumnProjectionUtils.setFullyReadColumns(conf);
  assertTrue(ColumnProjectionUtils.isReadAllColumns(conf));
}
BooleanVerifier EqualityVerifier HybridVerifier
/** setReadAllColumns clears previously appended ids and re-enables the read-all flag. */
@Test public void testReadAllColumns() {
  // A fresh conf starts in read-all mode.
  assertTrue(ColumnProjectionUtils.isReadAllColumns(conf));
  ColumnProjectionUtils.appendReadColumns(conf, Arrays.asList(0, 1, 2));
  ColumnProjectionUtils.setReadAllColumns(conf);
  assertEquals(Collections.EMPTY_LIST, ColumnProjectionUtils.getReadColumnIDs(conf));
  assertTrue(ColumnProjectionUtils.isReadAllColumns(conf));
}
Class: org.apache.hadoop.hive.serde2.TestOpenCSVSerde InternalCallVerifier EqualityVerifier
/**
 * OpenCSVSerde.deserialize splits a comma-separated line, honouring quoted fields
 * that contain the separator. Raw List restored to the wildcard form.
 */
@Test public void testDeserialize() throws Exception {
  csv.initialize(null, props);
  final Text in = new Text("hello,\"yes, okay\",1");
  final List<?> row = (List<?>) csv.deserialize(in);
  assertEquals("hello", row.get(0));
  // The quoted field keeps its embedded comma; the quotes themselves are stripped.
  assertEquals("yes, okay", row.get(1));
  assertEquals("1", row.get(2));
}
InternalCallVerifier EqualityVerifier
/**
 * OpenCSVSerde honours a custom separator (tab) and quote character (single quote).
 * Raw List restored to the wildcard form.
 */
@Test public void testDeserializeCustomSeparators() throws Exception {
  props.setProperty(OpenCSVSerde.SEPARATORCHAR, "\t");
  props.setProperty(OpenCSVSerde.QUOTECHAR, "'");
  csv.initialize(null, props);
  final Text in = new Text("hello\t'yes\tokay'\t1");
  final List<?> row = (List<?>) csv.deserialize(in);
  assertEquals("hello", row.get(0));
  // The single-quoted field keeps its embedded tab.
  assertEquals("yes\tokay", row.get(1));
  assertEquals("1", row.get(2));
}
InternalCallVerifier EqualityVerifier
/**
 * OpenCSVSerde honours a custom escape character: an escaped quote inside a
 * quoted field does not terminate the field. Raw List restored to the wildcard form.
 */
@Test public void testDeserializeCustomEscape() throws Exception {
  props.setProperty(OpenCSVSerde.QUOTECHAR, "'");
  props.setProperty(OpenCSVSerde.ESCAPECHAR, "\\");
  csv.initialize(null, props);
  final Text in = new Text("hello,'yes\\'okay',1");
  final List<?> row = (List<?>) csv.deserialize(in);
  assertEquals("hello", row.get(0));
  // The escaped quote survives as a literal quote in the field value.
  assertEquals("yes'okay", row.get(1));
  assertEquals("1", row.get(2));
}
Class: org.apache.hadoop.hive.serde2.avro.TestAvroDeserializer APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Avro maps declared with avro.java.string=String use java.lang.String keys
 * (instead of avro Utf8); the deserializer must still produce a lookup-able map.
 * Raw Map/ArrayList/List types (apparently stripped generics) restored.
 */
@Test public void canDeserializeMapsWithJavaLangStringKeys() throws IOException, SerDeException {
  String schemaString = "{\n" + "  \"namespace\": \"testing\",\n" + "  \"name\": \"oneMap\",\n" + "  \"type\": \"record\",\n" + "  \"fields\": [\n" + "    {\n" + "      \"name\":\"aMap\",\n" + "      \"type\":{\"type\":\"map\",\n" + "      \"avro.java.string\":\"String\",\n" + "      \"values\":\"long\"}\n" + "\t}\n" + "  ]\n" + "}";
  Schema s = AvroSerdeUtils.getSchemaFor(schemaString);
  GenericData.Record record = new GenericData.Record(s);
  // Hashtable is kept deliberately: it matches the original fixture (null-hostile,
  // different iteration order than HashMap).
  Map<String, Long> m = new Hashtable<String, Long>();
  m.put("one", 1l);
  m.put("two", 2l);
  m.put("three", 3l);
  record.put("aMap", m);
  assertTrue(GENERIC_DATA.validate(s, record));
  System.out.println("record = " + record);
  AvroGenericRecordWritable garw = Utils.serializeAndDeserializeRecord(record);
  AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(s);
  AvroDeserializer de = new AvroDeserializer();
  List<?> row = (List<?>) de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, s);
  assertEquals(1, row.size());
  Object theMapObject = row.get(0);
  assertTrue(theMapObject instanceof Map);
  Map<?, ?> theMap = (Map<?, ?>) theMapObject;
  // String (not Utf8) keys must find the values.
  assertEquals(1l, theMap.get("one"));
  assertEquals(2l, theMap.get("two"));
  assertEquals(3l, theMap.get("three"));
  StandardStructObjectInspector oi = (StandardStructObjectInspector) aoig.getObjectInspector();
  List<Object> z = oi.getStructFieldsDataAsList(row);
  assertEquals(1, z.size());
  // Field names are lower-cased by the inspector generator.
  StructField fieldRef = oi.getStructFieldRef("amap");
  Map<?, ?> theMap2 = (Map<?, ?>) oi.getStructFieldData(row, fieldRef);
  assertEquals(1l, theMap2.get("one"));
  assertEquals(2l, theMap2.get("two"));
  assertEquals(3l, theMap2.get("three"));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test public void canDeserializeEnums() throws SerDeException, IOException {
Schema s=AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.ENUM_SCHEMA);
GenericData.Record record=new GenericData.Record(s);
record.put("baddies",new GenericData.EnumSymbol(s.getField("baddies").schema(),"DALEKS"));
assertTrue(GENERIC_DATA.validate(s,record));
AvroGenericRecordWritable garw=Utils.serializeAndDeserializeRecord(record);
AvroObjectInspectorGenerator aoig=new AvroObjectInspectorGenerator(s);
AvroDeserializer de=new AvroDeserializer();
ArrayList row=(ArrayList)de.deserialize(aoig.getColumnNames(),aoig.getColumnTypes(),garw,s);
assertEquals(1,row.size());
StandardStructObjectInspector oi=(StandardStructObjectInspector)aoig.getObjectInspector();
List extends StructField> fieldRefs=oi.getAllStructFieldRefs();
assertEquals(1,fieldRefs.size());
StructField fieldRef=fieldRefs.get(0);
assertEquals("baddies",fieldRef.getFieldName());
Object theStringObject=oi.getStructFieldData(row,fieldRef);
assertTrue(fieldRef.getFieldObjectInspector() instanceof StringObjectInspector);
StringObjectInspector soi=(StringObjectInspector)fieldRef.getFieldObjectInspector();
String finalValue=soi.getPrimitiveJavaObject(theStringObject);
assertEquals("DALEKS",finalValue);
}
APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Avro "bytes" fields (ByteBuffer-backed) deserialize into byte[], both via the
 * row and via the struct object inspector.
 * Raw ArrayList/List types (apparently stripped generics) restored.
 */
@Test public void canDeserializeBytes() throws SerDeException, IOException {
  Schema s = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.BYTES_SCHEMA);
  GenericData.Record record = new GenericData.Record(s);
  byte[] bytes = "ANANCIENTBLUEBOX".getBytes();
  ByteBuffer bb = ByteBuffer.wrap(bytes);
  bb.rewind();
  record.put("bytesField", bb);
  assertTrue(GENERIC_DATA.validate(s, record));
  AvroGenericRecordWritable garw = Utils.serializeAndDeserializeRecord(record);
  AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(s);
  AvroDeserializer de = new AvroDeserializer();
  List<?> row = (List<?>) de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, s);
  assertEquals(1, row.size());
  Object byteObject = row.get(0);
  assertTrue(byteObject instanceof byte[]);
  byte[] outBytes = (byte[]) byteObject;
  // Compare contents byte-by-byte against the original payload.
  for (int i = 0; i < bytes.length; i++) {
    assertEquals(bytes[i], outBytes[i]);
  }
  StandardStructObjectInspector oi = (StandardStructObjectInspector) aoig.getObjectInspector();
  List<Object> fieldsDataAsList = oi.getStructFieldsDataAsList(row);
  assertEquals(1, fieldsDataAsList.size());
  StructField fieldRef = oi.getStructFieldRef("bytesField");
  outBytes = (byte[]) oi.getStructFieldData(row, fieldRef);
  for (int i = 0; i < outBytes.length; i++) {
    assertEquals(bytes[i], outBytes[i]);
  }
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * An Avro "null"-typed field deserializes to a Java null, inspected by a
 * VoidObjectInspector. Raw ArrayList type (apparently stripped generics) restored.
 */
@Test public void canDeserializeVoidType() throws IOException, SerDeException {
  String schemaString = "{\n" + "  \"type\": \"record\", \n" + "  \"name\": \"nullTest\",\n" + "  \"fields\" : [\n" + "    {\"name\": \"isANull\", \"type\": \"null\"}\n" + "  ]\n" + "}";
  Schema s = AvroSerdeUtils.getSchemaFor(schemaString);
  GenericData.Record record = new GenericData.Record(s);
  record.put("isANull", null);
  assertTrue(GENERIC_DATA.validate(s, record));
  AvroGenericRecordWritable garw = Utils.serializeAndDeserializeRecord(record);
  AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(s);
  AvroDeserializer de = new AvroDeserializer();
  List<?> row = (List<?>) de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, s);
  assertEquals(1, row.size());
  Object theVoidObject = row.get(0);
  assertNull(theVoidObject);
  StandardStructObjectInspector oi = (StandardStructObjectInspector) aoig.getObjectInspector();
  StructField fieldRef = oi.getStructFieldRef("isANull");
  Object shouldBeNull = oi.getStructFieldData(row, fieldRef);
  assertNull(shouldBeNull);
  assertTrue(fieldRef.getFieldObjectInspector() instanceof VoidObjectInspector);
}
APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Avro "fixed" fields deserialize into byte[], both via the row and via the
 * struct object inspector.
 * Raw ArrayList/List types (apparently stripped generics) restored.
 * NOTE(review): the Fixed is built with the record schema s rather than the
 * "hash" field's schema — validate() passes here, but confirm against the
 * upstream fixture.
 */
@Test public void canDeserializeFixed() throws SerDeException, IOException {
  Schema s = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.FIXED_SCHEMA);
  GenericData.Record record = new GenericData.Record(s);
  byte[] bytes = "ANANCIENTBLUEBOX".getBytes();
  record.put("hash", new GenericData.Fixed(s, bytes));
  assertTrue(GENERIC_DATA.validate(s, record));
  AvroGenericRecordWritable garw = Utils.serializeAndDeserializeRecord(record);
  AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(s);
  AvroDeserializer de = new AvroDeserializer();
  List<?> row = (List<?>) de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, s);
  assertEquals(1, row.size());
  Object byteObject = row.get(0);
  assertTrue(byteObject instanceof byte[]);
  byte[] outBytes = (byte[]) byteObject;
  // Compare contents byte-by-byte against the original payload.
  for (int i = 0; i < bytes.length; i++) {
    assertEquals(bytes[i], outBytes[i]);
  }
  StandardStructObjectInspector oi = (StandardStructObjectInspector) aoig.getObjectInspector();
  List<Object> fieldsDataAsList = oi.getStructFieldsDataAsList(row);
  assertEquals(1, fieldsDataAsList.size());
  StructField fieldRef = oi.getStructFieldRef("hash");
  outBytes = (byte[]) oi.getStructFieldData(row, fieldRef);
  for (int i = 0; i < outBytes.length; i++) {
    assertEquals(bytes[i], outBytes[i]);
  }
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
/**
 * Schema evolution on unions (UNION_SCHEMA_3 written, UNION_SCHEMA_4 read):
 * int and float branch values survive with the correct union tag.
 */
@Test public void canDeserializeEvolvedUnions2() throws SerDeException, IOException {
  Schema writerSchema = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.UNION_SCHEMA_3);
  Schema readerSchema = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.UNION_SCHEMA_4);
  // Integer branch: tag 0 under the reader schema.
  GenericData.Record record = new GenericData.Record(writerSchema);
  record.put("aUnion", 90);
  ResultPair result = unionTester(writerSchema, readerSchema, record);
  assertTrue(result.value instanceof Integer);
  assertEquals(90, result.value);
  UnionObjectInspector unionOI = (UnionObjectInspector) result.oi;
  assertEquals(0, unionOI.getTag(result.unionObject));
  // Float branch: tag 1 under the reader schema.
  record = new GenericData.Record(writerSchema);
  record.put("aUnion", 99.9f);
  result = unionTester(writerSchema, readerSchema, record);
  assertTrue(result.value instanceof Float);
  assertEquals(99.9f, result.value);
  unionOI = (UnionObjectInspector) result.oi;
  assertEquals(1, unionOI.getTag(result.unionObject));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
/**
 * Schema-evolution test for unions: records written with UNION_SCHEMA are read
 * with UNION_SCHEMA_2. Both the string and the int branches must deserialize
 * to the original value, with the tag resolved against the reader schema.
 */
@Test public void canDeserializeEvolvedUnions1() throws SerDeException, IOException {
Schema ws=AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.UNION_SCHEMA);
Schema rs=AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.UNION_SCHEMA_2);
GenericData.Record record=new GenericData.Record(ws);
record.put("aUnion","this is a string");
ResultPair result=unionTester(ws,rs,record);
assertTrue(result.value instanceof String);
assertEquals("this is a string",result.value);
UnionObjectInspector uoi=(UnionObjectInspector)result.oi;
// string branch resolves to tag 2 in the evolved reader schema
assertEquals(2,uoi.getTag(result.unionObject));
record=new GenericData.Record(ws);
record.put("aUnion",99);
result=unionTester(ws,rs,record);
assertTrue(result.value instanceof Integer);
assertEquals(99,result.value);
uoi=(UnionObjectInspector)result.oi;
// int branch resolves to tag 1 in the evolved reader schema
assertEquals(1,uoi.getTag(result.unionObject));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Round-trips a record with a single array-of-strings column and verifies the
 * elements both on the raw deserialized row and through the standard
 * list/struct ObjectInspector access path.
 */
@Test public void canDeserializeArrays() throws SerDeException, IOException {
Schema s=AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.ARRAY_WITH_PRIMITIVE_ELEMENT_TYPE);
GenericData.Record record=new GenericData.Record(s);
List list=new ArrayList();
list.add("Eccleston");
list.add("Tennant");
list.add("Smith");
record.put("anArray",list);
assertTrue(GENERIC_DATA.validate(s,record));
System.out.println("Array-backed record = " + record);
// serialize + deserialize through the writable, as a real job would
AvroGenericRecordWritable garw=Utils.serializeAndDeserializeRecord(record);
AvroObjectInspectorGenerator aoig=new AvroObjectInspectorGenerator(s);
AvroDeserializer de=new AvroDeserializer();
ArrayList row=(ArrayList)de.deserialize(aoig.getColumnNames(),aoig.getColumnTypes(),garw,s);
assertEquals(1,row.size());
Object theArrayObject=row.get(0);
assertTrue(theArrayObject instanceof List);
List theList=(List)theArrayObject;
// direct access to the row data
assertEquals("Eccleston",theList.get(0));
assertEquals("Tennant",theList.get(1));
assertEquals("Smith",theList.get(2));
// same checks via the ObjectInspector chain
StandardStructObjectInspector oi=(StandardStructObjectInspector)aoig.getObjectInspector();
StructField fieldRefToArray=oi.getStructFieldRef("anArray");
Object anArrayData=oi.getStructFieldData(row,fieldRefToArray);
StandardListObjectInspector anArrayOI=(StandardListObjectInspector)fieldRefToArray.getFieldObjectInspector();
assertEquals(3,anArrayOI.getListLength(anArrayData));
JavaStringObjectInspector elementOI=(JavaStringObjectInspector)anArrayOI.getListElementObjectInspector();
Object firstElement=anArrayOI.getListElement(anArrayData,0);
assertEquals("Eccleston",elementOI.getPrimitiveJavaObject(firstElement));
assertTrue(firstElement instanceof String);
Object secondElement=anArrayOI.getListElement(anArrayData,1);
assertEquals("Tennant",elementOI.getPrimitiveJavaObject(secondElement));
assertTrue(secondElement instanceof String);
Object thirdElement=anArrayOI.getListElement(anArrayData,2);
assertEquals("Smith",elementOI.getPrimitiveJavaObject(thirdElement));
assertTrue(thirdElement instanceof String);
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Deserializes a record whose single column is a map with nullable long values
 * and verifies all entries -- including the explicitly-null value for key "mu"
 * -- survive, both on the raw row and via the ObjectInspector access path.
 */
@Test public void canDeserializeMapWithNullablePrimitiveValues() throws SerDeException, IOException {
Schema s=AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.MAP_WITH_NULLABLE_PRIMITIVE_VALUE_TYPE_SCHEMA);
GenericData.Record record=new GenericData.Record(s);
Map m=new HashMap();
// Uppercase 'L' suffix: a lowercase 'l' is easily misread as the digit 1.
m.put("one",1L);
m.put("two",2L);
m.put("three",3L);
m.put("mu",null);
record.put("aMap",m);
assertTrue(GENERIC_DATA.validate(s,record));
System.out.println("record = " + record);
AvroGenericRecordWritable garw=Utils.serializeAndDeserializeRecord(record);
AvroObjectInspectorGenerator aoig=new AvroObjectInspectorGenerator(s);
AvroDeserializer de=new AvroDeserializer();
ArrayList row=(ArrayList)de.deserialize(aoig.getColumnNames(),aoig.getColumnTypes(),garw,s);
assertEquals(1,row.size());
Object theMapObject=row.get(0);
assertTrue(theMapObject instanceof Map);
Map theMap=(Map)theMapObject;
assertEquals(1L,theMap.get("one"));
assertEquals(2L,theMap.get("two"));
assertEquals(3L,theMap.get("three"));
// The null-valued key must still be present (null value, not a missing key).
assertTrue(theMap.containsKey("mu"));
assertEquals(null,theMap.get("mu"));
StandardStructObjectInspector oi=(StandardStructObjectInspector)aoig.getObjectInspector();
List z=oi.getStructFieldsDataAsList(row);
assertEquals(1,z.size());
// struct field lookup uses the lower-cased column name
StructField fieldRef=oi.getStructFieldRef("amap");
Map theMap2=(Map)oi.getStructFieldData(row,fieldRef);
assertEquals(1L,theMap2.get("one"));
assertEquals(2L,theMap2.get("two"));
assertEquals(3L,theMap2.get("three"));
assertTrue(theMap2.containsKey("mu"));
assertEquals(null,theMap2.get("mu"));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
/**
 * Round-trips both branches of a union column (string and int) with the same
 * writer and reader schema, verifying the value and the union tag reported by
 * the UnionObjectInspector.
 */
@Test public void canDeserializeUnions() throws SerDeException, IOException {
Schema s=AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.UNION_SCHEMA);
GenericData.Record record=new GenericData.Record(s);
record.put("aUnion","this is a string");
ResultPair result=unionTester(s,record);
assertTrue(result.value instanceof String);
assertEquals("this is a string",result.value);
UnionObjectInspector uoi=(UnionObjectInspector)result.oi;
// string branch carries tag 1 in this schema
assertEquals(1,uoi.getTag(result.unionObject));
record=new GenericData.Record(s);
record.put("aUnion",99);
result=unionTester(s,record);
assertTrue(result.value instanceof Integer);
assertEquals(99,result.value);
uoi=(UnionObjectInspector)result.oi;
// int branch carries tag 0
assertEquals(0,uoi.getTag(result.unionObject));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Deserializes a record containing a map of long values and checks the entries
 * survive, both on the raw deserialized row and through the ObjectInspector.
 */
@Test public void canDeserializeMapsWithPrimitiveKeys() throws SerDeException, IOException {
Schema s=AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.MAP_WITH_PRIMITIVE_VALUE_TYPE);
GenericData.Record record=new GenericData.Record(s);
// HashMap instead of the legacy synchronized Hashtable; nothing here is concurrent.
Map m=new HashMap();
// Uppercase 'L' suffix: a lowercase 'l' is easily misread as the digit 1.
m.put("one",1L);
m.put("two",2L);
m.put("three",3L);
record.put("aMap",m);
assertTrue(GENERIC_DATA.validate(s,record));
System.out.println("record = " + record);
AvroGenericRecordWritable garw=Utils.serializeAndDeserializeRecord(record);
AvroObjectInspectorGenerator aoig=new AvroObjectInspectorGenerator(s);
AvroDeserializer de=new AvroDeserializer();
ArrayList row=(ArrayList)de.deserialize(aoig.getColumnNames(),aoig.getColumnTypes(),garw,s);
assertEquals(1,row.size());
Object theMapObject=row.get(0);
assertTrue(theMapObject instanceof Map);
Map theMap=(Map)theMapObject;
assertEquals(1L,theMap.get("one"));
assertEquals(2L,theMap.get("two"));
assertEquals(3L,theMap.get("three"));
StandardStructObjectInspector oi=(StandardStructObjectInspector)aoig.getObjectInspector();
List z=oi.getStructFieldsDataAsList(row);
assertEquals(1,z.size());
// struct field lookup uses the lower-cased column name
StructField fieldRef=oi.getStructFieldRef("amap");
Map theMap2=(Map)oi.getStructFieldData(row,fieldRef);
assertEquals(1L,theMap2.get("one"));
assertEquals(2L,theMap2.get("two"));
assertEquals(3L,theMap2.get("three"));
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Exercises AvroDeserializer's per-record-reader caches: getNoEncodingNeeded()
 * (reader schema matches, no re-encoding) and getReEncoderCache() (schema
 * evolution forces re-encoding). Cache entries are keyed by the record-reader
 * UID set on the writable.
 */
@Test public void verifyCaching() throws SerDeException, IOException {
Schema s=AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.RECORD_SCHEMA);
GenericData.Record record=new GenericData.Record(s);
GenericData.Record innerRecord=new GenericData.Record(s.getField("aRecord").schema());
innerRecord.put("int1",42);
innerRecord.put("boolean1",true);
// Uppercase 'L' suffix: a lowercase 'l' is easily misread as the digit 1.
innerRecord.put("long1",42432234234L);
record.put("aRecord",innerRecord);
assertTrue(GENERIC_DATA.validate(s,record));
AvroGenericRecordWritable garw=Utils.serializeAndDeserializeRecord(record);
UID recordReaderID=new UID();
garw.setRecordReaderID(recordReaderID);
AvroObjectInspectorGenerator aoig=new AvroObjectInspectorGenerator(s);
AvroDeserializer de=new AvroDeserializer();
// First deserialize with a matching schema: one no-encoding entry appears.
ArrayList row=(ArrayList)de.deserialize(aoig.getColumnNames(),aoig.getColumnTypes(),garw,s);
assertEquals(1,de.getNoEncodingNeeded().size());
assertEquals(0,de.getReEncoderCache().size());
// Same reader ID again: caches must not grow.
row=(ArrayList)de.deserialize(aoig.getColumnNames(),aoig.getColumnTypes(),garw,s);
assertEquals(1,de.getNoEncodingNeeded().size());
assertEquals(0,de.getReEncoderCache().size());
// New reader ID, same schema: a second no-encoding entry.
garw.setRecordReaderID(new UID());
row=(ArrayList)de.deserialize(aoig.getColumnNames(),aoig.getColumnTypes(),garw,s);
assertEquals(2,de.getNoEncodingNeeded().size());
assertEquals(0,de.getReEncoderCache().size());
// Evolve the schema (extra prop) so a re-encoder is required.
Schema evolvedSchema=AvroSerdeUtils.getSchemaFor(s.toString());
evolvedSchema.getField("aRecord").schema().addProp("Testing","meaningless");
// Embedded assignment split out for readability; the ID is reused below.
recordReaderID=new UID();
garw.setRecordReaderID(recordReaderID);
row=(ArrayList)de.deserialize(aoig.getColumnNames(),aoig.getColumnTypes(),garw,evolvedSchema);
assertEquals(2,de.getNoEncodingNeeded().size());
assertEquals(1,de.getReEncoderCache().size());
// Same reader ID with the evolved schema: re-encoder cache is reused, not grown.
garw.setRecordReaderID(recordReaderID);
row=(ArrayList)de.deserialize(aoig.getColumnNames(),aoig.getColumnTypes(),garw,evolvedSchema);
assertEquals(2,de.getNoEncodingNeeded().size());
assertEquals(1,de.getReEncoderCache().size());
}
Class: org.apache.hadoop.hive.serde2.avro.TestAvroObjectInspectorGenerator APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test public void convertsNullableEnum() throws SerDeException {
Schema s=AvroSerdeUtils.getSchemaFor(NULLABLE_ENUM_SCHEMA);
AvroObjectInspectorGenerator aoig=new AvroObjectInspectorGenerator(s);
assertEquals(1,aoig.getColumnNames().size());
assertEquals("nullableEnum",aoig.getColumnNames().get(0));
assertEquals(1,aoig.getColumnTypes().size());
TypeInfo typeInfo=aoig.getColumnTypes().get(0);
assertTrue(typeInfo instanceof PrimitiveTypeInfo);
PrimitiveTypeInfo pti=(PrimitiveTypeInfo)typeInfo;
assertEquals(PrimitiveObjectInspector.PrimitiveCategory.STRING,pti.getPrimitiveCategory());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/** An Avro array of strings becomes one Hive LIST column named "anArray" with string elements. */
@Test public void canHandleArrays() throws SerDeException {
AvroObjectInspectorGenerator generator=new AvroObjectInspectorGenerator(AvroSerdeUtils.getSchemaFor(ARRAY_WITH_PRIMITIVE_ELEMENT_TYPE));
assertEquals(1,generator.getColumnNames().size());
assertEquals("anArray",generator.getColumnNames().get(0));
assertEquals(1,generator.getColumnTypes().size());
TypeInfo onlyType=generator.getColumnTypes().get(0);
assertEquals(ObjectInspector.Category.LIST,onlyType.getCategory());
assertTrue(onlyType instanceof ListTypeInfo);
assertEquals("string",((ListTypeInfo)onlyType).getListElementTypeInfo().getTypeName());
}
APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
@Test public void primitiveTypesWorkCorrectly() throws SerDeException {
final String bunchOfPrimitives="{\n" + " \"namespace\": \"testing\",\n" + " \"name\": \"PrimitiveTypes\",\n"+ " \"type\": \"record\",\n"+ " \"fields\": [\n"+ " {\n"+ " \"name\":\"aString\",\n"+ " \"type\":\"string\"\n"+ " },\n"+ " {\n"+ " \"name\":\"anInt\",\n"+ " \"type\":\"int\"\n"+ " },\n"+ " {\n"+ " \"name\":\"aBoolean\",\n"+ " \"type\":\"boolean\"\n"+ " },\n"+ " {\n"+ " \"name\":\"aLong\",\n"+ " \"type\":\"long\"\n"+ " },\n"+ " {\n"+ " \"name\":\"aFloat\",\n"+ " \"type\":\"float\"\n"+ " },\n"+ " {\n"+ " \"name\":\"aDouble\",\n"+ " \"type\":\"double\"\n"+ " },\n"+ " {\n"+ " \"name\":\"aNull\",\n"+ " \"type\":\"null\"\n"+ " }\n"+ " ]\n"+ "}";
AvroObjectInspectorGenerator aoig=new AvroObjectInspectorGenerator(AvroSerdeUtils.getSchemaFor(bunchOfPrimitives));
String[] expectedColumnNames={"aString","anInt","aBoolean","aLong","aFloat","aDouble","aNull"};
verifyColumnNames(expectedColumnNames,aoig.getColumnNames());
TypeInfo[] expectedColumnTypes={STRING,INT,BOOLEAN,LONG,FLOAT,DOUBLE,VOID};
verifyColumnTypes(expectedColumnTypes,aoig.getColumnTypes());
final ObjectInspector oi=aoig.getObjectInspector();
assertTrue(oi instanceof StandardStructObjectInspector);
final StandardStructObjectInspector ssoi=(StandardStructObjectInspector)oi;
List extends StructField> structFields=ssoi.getAllStructFieldRefs();
assertEquals(expectedColumnNames.length,structFields.size());
for (int i=0; i < expectedColumnNames.length; i++) {
assertEquals("Column names don't match",expectedColumnNames[i].toLowerCase(),structFields.get(i).getFieldName());
assertEquals("Column types don't match",expectedColumnTypes[i].getTypeName(),structFields.get(i).getFieldObjectInspector().getTypeName());
}
}
APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * An Avro record column becomes a Hive STRUCT; field names and type names must
 * map int1->int, boolean1->boolean, long1->bigint, in declaration order.
 */
@Test public void canHandleRecords() throws SerDeException {
Schema s=AvroSerdeUtils.getSchemaFor(RECORD_SCHEMA);
AvroObjectInspectorGenerator aoig=new AvroObjectInspectorGenerator(s);
assertEquals(1,aoig.getColumnNames().size());
assertEquals("aRecord",aoig.getColumnNames().get(0));
assertEquals(1,aoig.getColumnTypes().size());
TypeInfo typeInfo=aoig.getColumnTypes().get(0);
assertEquals(ObjectInspector.Category.STRUCT,typeInfo.getCategory());
assertTrue(typeInfo instanceof StructTypeInfo);
StructTypeInfo structTypeInfo=(StructTypeInfo)typeInfo;
ArrayList<String> allStructFieldNames=structTypeInfo.getAllStructFieldNames();
ArrayList<TypeInfo> allStructFieldTypeInfos=structTypeInfo.getAllStructFieldTypeInfos();
// JUnit convention is assertEquals(expected, actual); the original had them reversed.
assertEquals(3,allStructFieldNames.size());
String[] names=new String[]{"int1","boolean1","long1"};
String[] typeInfoStrings=new String[]{"int","boolean","bigint"};
for (int i=0; i < allStructFieldNames.size(); i++) {
assertEquals("Fieldname " + allStructFieldNames.get(i) + " doesn't match expected "+ names[i],names[i],allStructFieldNames.get(i));
assertEquals("Typeinfo " + allStructFieldTypeInfos.get(i) + " doesn't match expected "+ typeInfoStrings[i],typeInfoStrings[i],allStructFieldTypeInfos.get(i).getTypeName());
}
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/** Avro "bytes" must map to the Hive BINARY primitive category. */
@Test public void canHandleBytes() throws SerDeException {
Schema s=AvroSerdeUtils.getSchemaFor(BYTES_SCHEMA);
AvroObjectInspectorGenerator aoig=new AvroObjectInspectorGenerator(s);
assertEquals(1,aoig.getColumnNames().size());
assertEquals("bytesField",aoig.getColumnNames().get(0));
assertEquals(1,aoig.getColumnTypes().size());
TypeInfo typeInfo=aoig.getColumnTypes().get(0);
assertTrue(typeInfo instanceof PrimitiveTypeInfo);
// Expected value first, per the assertEquals(expected, actual) convention.
assertEquals(PrimitiveCategory.BINARY,((PrimitiveTypeInfo)typeInfo).getPrimitiveCategory());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/** Avro "fixed" must map to the Hive BINARY primitive category, same as bytes. */
@Test public void canHandleFixed() throws SerDeException {
Schema s=AvroSerdeUtils.getSchemaFor(FIXED_SCHEMA);
AvroObjectInspectorGenerator aoig=new AvroObjectInspectorGenerator(s);
assertEquals(1,aoig.getColumnNames().size());
assertEquals("hash",aoig.getColumnNames().get(0));
assertEquals(1,aoig.getColumnTypes().size());
TypeInfo typeInfo=aoig.getColumnTypes().get(0);
assertTrue(typeInfo instanceof PrimitiveTypeInfo);
// Expected value first, per the assertEquals(expected, actual) convention.
assertEquals(PrimitiveCategory.BINARY,((PrimitiveTypeInfo)typeInfo).getPrimitiveCategory());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/** An Avro [int, string] union maps to a Hive uniontype with members in declaration order. */
@Test public void canHandleUnions() throws SerDeException {
AvroObjectInspectorGenerator generator=new AvroObjectInspectorGenerator(AvroSerdeUtils.getSchemaFor(UNION_SCHEMA));
assertEquals(1,generator.getColumnNames().size());
assertEquals("aUnion",generator.getColumnNames().get(0));
assertEquals(1,generator.getColumnTypes().size());
TypeInfo onlyType=generator.getColumnTypes().get(0);
assertTrue(onlyType instanceof UnionTypeInfo);
UnionTypeInfo unionTypeInfo=(UnionTypeInfo)onlyType;
List memberTypes=unionTypeInfo.getAllUnionObjectTypeInfos();
assertEquals(2,memberTypes.size());
assertEquals(INT,memberTypes.get(0));
assertEquals(STRING,memberTypes.get(1));
assertEquals("uniontype",unionTypeInfo.getTypeName());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/** Avro enums are projected to Hive string columns. */
@Test public void canHandleEnums() throws SerDeException {
AvroObjectInspectorGenerator generator=new AvroObjectInspectorGenerator(AvroSerdeUtils.getSchemaFor(ENUM_SCHEMA));
assertEquals(1,generator.getColumnNames().size());
assertEquals("baddies",generator.getColumnNames().get(0));
assertEquals(1,generator.getColumnTypes().size());
assertEquals(STRING,generator.getColumnTypes().get(0));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/** A [null, string] union collapses to a plain Hive string column (nullability is implicit). */
@Test public void convertsNullableTypes() throws SerDeException {
AvroObjectInspectorGenerator generator=new AvroObjectInspectorGenerator(AvroSerdeUtils.getSchemaFor(NULLABLE_STRING_SCHEMA));
List<String> columnNames=generator.getColumnNames();
assertEquals(1,columnNames.size());
assertEquals("nullableString",columnNames.get(0));
List<TypeInfo> columnTypes=generator.getColumnTypes();
assertEquals(1,columnTypes.size());
TypeInfo onlyType=columnTypes.get(0);
assertTrue(onlyType instanceof PrimitiveTypeInfo);
assertEquals(PrimitiveObjectInspector.PrimitiveCategory.STRING,((PrimitiveTypeInfo)onlyType).getPrimitiveCategory());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/** Two generators built from equal (but distinct) schema objects must share one cached ObjectInspector. */
@Test public void objectInspectorsAreCached() throws SerDeException {
AvroObjectInspectorGenerator first=new AvroObjectInspectorGenerator(AvroSerdeUtils.getSchemaFor(KITCHEN_SINK_SCHEMA));
AvroObjectInspectorGenerator second=new AvroObjectInspectorGenerator(AvroSerdeUtils.getSchemaFor(KITCHEN_SINK_SCHEMA));
// Equal by equals() and, because of the cache, the very same instance.
assertEquals(first.getObjectInspector(),second.getObjectInspector());
assertTrue(first.getObjectInspector() == second.getObjectInspector());
}
Class: org.apache.hadoop.hive.serde2.avro.TestAvroSerde EqualityVerifier
/** The serde must advertise AvroGenericRecordWritable as its serialized form. */
@Test public void getSerializedClassReturnsCorrectType(){
assertEquals(AvroGenericRecordWritable.class,new AvroSerDe().getSerializedClass());
}
InternalCallVerifier EqualityVerifier
/**
 * Initializing the serde must derive the schema from the table properties'
 * SCHEMA_LITERAL and overwrite any stale schema already present in the conf.
 * originalSchema/newSchema/newSchemaString are fixtures defined elsewhere in
 * this test class.
 */
@Test public void initializeDoesNotReuseSchemasFromConf() throws SerDeException {
Configuration conf=new Configuration();
// Pre-seed the conf with a different schema that must NOT be reused.
conf.set(AvroTableProperties.AVRO_SERDE_SCHEMA.getPropName(),originalSchema.toString(false));
Properties props=new Properties();
props.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(),newSchemaString);
AvroSerDe asd=new AvroSerDe();
SerDeUtils.initializeSerDe(asd,conf,props,null);
// After init, the conf should hold the schema taken from the literal.
assertEquals(newSchema,AvroSerdeUtils.getSchemaFor(conf.get(AvroTableProperties.AVRO_SERDE_SCHEMA.getPropName())));
}
Class: org.apache.hadoop.hive.serde2.avro.TestAvroSerdeUtils APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes a schema file into a single-node MiniDFSCluster and verifies
 * AvroSerdeUtils.getSchemaFromFS reads an identical schema back from the HDFS
 * URI. The cluster is unconditionally shut down in the finally block.
 */
@Test public void determineSchemaCanReadSchemaFromHDFS() throws IOException, AvroSerdeException, URISyntaxException {
String schemaString=TestAvroObjectInspectorGenerator.RECORD_SCHEMA;
MiniDFSCluster miniDfs=null;
try {
miniDfs=new MiniDFSCluster(new Configuration(),1,true,null);
miniDfs.getFileSystem().mkdirs(new Path("/path/to/schema"));
FSDataOutputStream out=miniDfs.getFileSystem().create(new Path("/path/to/schema/schema.avsc"));
out.writeBytes(schemaString);
out.close();
String onHDFS=miniDfs.getFileSystem().getUri() + "/path/to/schema/schema.avsc";
Schema schemaFromHDFS=AvroSerdeUtils.getSchemaFromFS(onHDFS,miniDfs.getFileSystem().getConf());
Schema expectedSchema=AvroSerdeUtils.getSchemaFor(schemaString);
assertEquals(expectedSchema,schemaFromHDFS);
}
finally {
// always tear the mini-cluster down, even if an assertion failed
if (miniDfs != null) miniDfs.shutdown();
}
}
UtilityVerifier EqualityVerifier HybridVerifier
/**
 * A bogus SCHEMA_URL must surface as an AvroSerdeException whose message names
 * the unreadable path.
 * NOTE(review): the method name has a typo ("detemine"); left unchanged so as
 * not to break anything that references this test by name.
 */
@Test public void detemineSchemaTriesToOpenUrl() throws AvroSerdeException, IOException {
Configuration conf=new Configuration();
Properties props=new Properties();
props.put(AvroTableProperties.SCHEMA_URL.getPropName(),"not:///a.real.url");
try {
AvroSerdeUtils.determineSchemaOrThrowException(conf,props);
fail("Should have tried to open that URL");
}
catch ( AvroSerdeException e) {
assertEquals("Unable to read schema from given path: not:///a.real.url",e.getMessage());
}
}
APIUtilityVerifier UtilityVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Exercises the SCHEMA_NONE sentinel in three phases:
 * 1) both url and literal set to "none" -> exception with EXCEPTION_MESSAGE;
 * 2) literal set to a real schema -> it is parsed and returned;
 * 3) literal back to "none" with a bogus url -> the url is tried and fails.
 */
@Test public void noneOptionWorksForSpecifyingSchemas() throws IOException, AvroSerdeException {
Configuration conf=new Configuration();
Properties props=new Properties();
props.put(AvroTableProperties.SCHEMA_URL.getPropName(),SCHEMA_NONE);
props.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(),SCHEMA_NONE);
try {
determineSchemaOrThrowException(conf,props);
fail("Should have thrown exception with none set for both url and literal");
}
catch ( AvroSerdeException he) {
assertEquals(EXCEPTION_MESSAGE,he.getMessage());
}
// phase 2: a real literal wins even though the url is still "none"
props.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(),TestAvroObjectInspectorGenerator.RECORD_SCHEMA);
Schema s;
try {
s=determineSchemaOrThrowException(conf,props);
assertNotNull(s);
assertEquals(AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.RECORD_SCHEMA),s);
}
catch ( AvroSerdeException he) {
fail("Should have parsed schema literal, not thrown exception.");
}
// phase 3: literal "none" forces the (bogus) url to be consulted
props.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(),SCHEMA_NONE);
props.put(AvroTableProperties.SCHEMA_URL.getPropName(),"not:///a.real.url");
try {
determineSchemaOrThrowException(conf,props);
fail("Should have tried to open that bogus URL");
}
catch ( AvroSerdeException e) {
assertEquals("Unable to read schema from given path: not:///a.real.url",e.getMessage());
}
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/** getOtherTypeFromNullableType must return the non-null branch for both union orderings. */
@Test public void getTypeFromNullableTypePositiveCase(){
Schema nullFirst=AvroSerdeUtils.getSchemaFor(NULLABLE_UNION);
assertEquals(Schema.Type.STRING,getOtherTypeFromNullableType(nullFirst.getField("mayBeNull").schema()).getType());
Schema nullSecond=AvroSerdeUtils.getSchemaFor(NULLABLE_UNION2);
assertEquals(Schema.Type.STRING,getOtherTypeFromNullableType(nullSecond.getField("mayBeNull").schema()).getType());
}
APIUtilityVerifier EqualityVerifier
/** A schema supplied via the SCHEMA_LITERAL table property is parsed and returned verbatim. */
@Test public void determineSchemaFindsLiterals() throws Exception {
String literal=TestAvroObjectInspectorGenerator.RECORD_SCHEMA;
Properties tableProps=new Properties();
tableProps.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(),literal);
Schema determined=AvroSerdeUtils.determineSchemaOrThrowException(new Configuration(),tableProps);
assertEquals(AvroSerdeUtils.getSchemaFor(literal),determined);
}
Class: org.apache.hadoop.hive.serde2.avro.TestAvroSerializer APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Round-trips each branch of a [float, boolean, string, decimal-bytes] union
 * through serializeAndDeserialize (helper defined in this class) and checks
 * the value is preserved, including scale handling for the decimal branch.
 */
@Test public void canSerializeUnions() throws SerDeException, IOException {
String field="{ \"name\":\"union1\", \"type\":[\"float\", \"boolean\", \"string\"," + " {\"type\":\"bytes\", \"logicalType\":\"decimal\", \"precision\":5, \"scale\":4}] }";
GenericRecord r=serializeAndDeserialize(field,"union1",424.4f);
assertEquals(424.4f,r.get("union1"));
r=serializeAndDeserialize(field,"union1",true);
assertEquals(true,r.get("union1"));
r=serializeAndDeserialize(field,"union1","hello");
assertEquals("hello",r.get("union1"));
HiveDecimal dec=HiveDecimal.create("3.1415926");
// the decimal branch round-trips through a ByteBuffer at scale 4
r=serializeAndDeserialize(field,"union1",AvroSerdeUtils.getBufferFromDecimal(dec,4));
HiveDecimal dec1=AvroSerdeUtils.getHiveDecimalFromByteBuffer((ByteBuffer)r.get("union1"),4);
assertEquals(dec.setScale(4),dec1);
}
APIUtilityVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Round-trips a nullable enum field with both a real symbol (BLUE) and null.
 * enum1 is a fixture enum declared elsewhere in this test class.
 */
@Test public void canSerializeNullableEnums() throws SerDeException, IOException {
String type="{\"type\": \"enum\", \"name\": \"enum1_values\",\n" + " \"namespace\": \"org.apache.hadoop.hive\",\n" + " \"symbols\":[\"BLUE\",\"RED\",\"GREEN\"]}";
Schema schema=AvroSerdeUtils.getSchemaFor(type);
String field="{ \"name\":\"nullableenum\", \"type\": [\"null\", " + schema + "] }";
GenericEnumSymbol symbol=new GenericData.EnumSymbol(schema,enum1.BLUE.toString());
GenericRecord r=serializeAndDeserialize(field,"nullableenum",symbol);
assertEquals(enum1.BLUE,enum1.valueOf(r.get("nullableenum").toString()));
// null must survive the round trip as well
r=serializeAndDeserialize(field,"nullableenum",null);
assertNull(r.get("nullableenum"));
}
APIUtilityVerifier InternalCallVerifier IdentityVerifier EqualityVerifier HybridVerifier
/**
 * Round-trips an array of nullable ints containing an embedded null element;
 * the result must be an equal but distinct list instance.
 */
@Test public void canSerializeArraysWithNullablePrimitiveElements() throws SerDeException, IOException {
final String field="{ \"name\":\"listWithNulls\", \"type\": " + "{\"type\":\"array\", \"items\": [\"null\", \"int\"]} }";
List intList=new ArrayList();
Collections.addAll(intList,1,2,null,3);
GenericRecord r=serializeAndDeserialize(field,"listWithNulls",intList);
Object result=r.get("listWithNulls");
// a new list is materialized, equal in content to the input
assertNotSame(intList,result);
assertEquals(intList,result);
}
APIUtilityVerifier InternalCallVerifier IdentityVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Round-trips a nullable list field with both a populated list and null; the
 * non-null result must be an equal but distinct list instance.
 */
@Test public void canSerializeNullableLists() throws SerDeException, IOException {
List intList=new ArrayList();
Collections.addAll(intList,1,2,3);
String field="{ \"name\":\"nullableList\", \"type\": [\"null\", " + "{\"type\":\"array\", \"items\":\"int\"}] }";
GenericRecord r=serializeAndDeserialize(field,"nullableList",intList);
Object result=r.get("nullableList");
assertNotSame(intList,result);
assertEquals(intList,result);
// the null branch of the union must survive as well
r=serializeAndDeserialize(field,"nullableList",null);
assertNull(r.get("nullableList"));
}
APIUtilityVerifier EqualityVerifier
/**
 * Round-trips a three-byte Avro fixed field and compares the raw byte
 * contents of input and output.
 */
@Test public void canSerializeFixed() throws SerDeException, IOException {
String field="{ \"name\":\"fixed1\", \"type\":{\"type\":\"fixed\", " + "\"name\":\"threebytes\", \"size\":3} }";
GenericData.Fixed fixed=new GenericData.Fixed(buildSchema(field),"k9@".getBytes());
GenericRecord r=serializeAndDeserialize(field,"fixed1",fixed);
assertArrayEquals(fixed.bytes(),((GenericData.Fixed)r.get("fixed1")).bytes());
}
APIUtilityVerifier EqualityVerifier
/**
 * Round-trips every symbol of the fixture enum (enum1, declared elsewhere in
 * this class) through an Avro enum field and checks each survives.
 */
@Test public void canSerializeEnums() throws SerDeException, IOException {
String type="{\"type\": \"enum\", \"name\": \"enum1_values\", " + "\"symbols\":[\"BLUE\",\"RED\",\"GREEN\"]}";
Schema schema=AvroSerdeUtils.getSchemaFor(type);
String field="{ \"name\":\"enum1\", \"type\": " + schema + " }";
for ( enum1 e : enum1.values()) {
GenericEnumSymbol symbol=new GenericData.EnumSymbol(schema,e.toString());
GenericRecord r=serializeAndDeserialize(field,"enum1",symbol);
assertEquals(e,enum1.valueOf(r.get("enum1").toString()));
}
}
APIUtilityVerifier InternalCallVerifier IdentityVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Round-trips a nullable decimal-bytes field with both a real value and null;
 * the non-null result must be an equal but distinct buffer.
 */
@Test public void canSerializeNullableDecimals() throws SerDeException, IOException {
String field="{ \"name\":\"nullableBytes\", \"type\":[\"null\", " + "{\"type\":\"bytes\", \"logicalType\":\"decimal\", \"precision\":5, \"scale\":4}] }";
Buffer bb=AvroSerdeUtils.getBufferFromDecimal(HiveDecimal.create("3.1416"),4);
GenericRecord r=serializeAndDeserialize(field,"nullableBytes",bb);
Object result=r.get("nullableBytes");
assertNotSame(bb,result);
assertEquals(bb,result);
// null branch of the union
r=serializeAndDeserialize(field,"nullableBytes",null);
assertNull(r.get("nullableBytes"));
}
APIUtilityVerifier InternalCallVerifier IdentityVerifier EqualityVerifier HybridVerifier
@Test public void canSerializeArraysWithNullableComplexElements() throws SerDeException, IOException {
final String field="{ \"name\":\"listOfNullableLists\", \"type\": " + "{\"type\":\"array\", \"items\": [\"null\", " + "{\"type\": \"array\", \"items\": \"int\"}]} }";
List> intListList=new ArrayList>();
List intList=new ArrayList();
Collections.addAll(intList,1,2,3);
Collections.addAll(intListList,intList,null);
GenericRecord r=serializeAndDeserialize(field,"listOfNullableLists",intListList);
Object result=r.get("listOfNullableLists");
assertNotSame(intListList,result);
assertEquals(intListList,result);
}
APIUtilityVerifier EqualityVerifier
/** Round-trips a bytes field and checks the buffer content is preserved. */
@Test public void canSerializeBytes() throws SerDeException, IOException {
String field="{ \"name\":\"bytes1\", \"type\":\"bytes\" }";
ByteBuffer payload=ByteBuffer.wrap("easy as one two three".getBytes());
payload.rewind();
GenericRecord roundTripped=serializeAndDeserialize(field,"bytes1",payload);
assertEquals(payload,roundTripped.get("bytes1"));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier IdentityVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Round-trips a nullable fixed(3) field: the schema must be recognized as
 * nullable, a real value must come back as an equal-but-distinct Fixed, and
 * null must survive too.
 */
@Test public void canSerializeNullableFixed() throws SerDeException, IOException {
String field="{ \"name\":\"nullableFixed\", \"type\": [\"null\", " + "{\"type\":\"fixed\", \"name\":\"threebytes\", \"size\":3}] }";
Schema s=buildSchema(field);
Schema nullable=s.getField("nullableFixed").schema();
assertTrue(AvroSerdeUtils.isNullableType(nullable));
// build the Fixed against the non-null branch of the union
GenericData.Fixed fixed=new GenericData.Fixed(AvroSerdeUtils.getOtherTypeFromNullableType(nullable),"k9@".getBytes());
GenericRecord r=serializeAndDeserialize(field,"nullableFixed",fixed);
GenericData.Fixed result=(GenericData.Fixed)r.get("nullableFixed");
assertNotSame(fixed,result);
assertArrayEquals(fixed.bytes(),result.bytes());
r=serializeAndDeserialize(field,"nullableFixed",null);
assertNull(r.get("nullableFixed"));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Round-trips a map with nullable boolean values, including an entry whose
 * value is null, and checks map equality on the result.
 */
@Test public void canSerializeMapsWithNullablePrimitiveValues() throws SerDeException, IOException {
String field="{ \"name\":\"mapWithNulls\", \"type\": " + "{\"type\":\"map\", \"values\": [\"null\", \"boolean\"]} }";
Map m=new HashMap();
m.put("yes",true);
m.put("no",false);
m.put("maybe",null);
GenericRecord r=serializeAndDeserialize(field,"mapWithNulls",m);
Object result=r.get("mapWithNulls");
assertEquals(m,result);
}
APIUtilityVerifier InternalCallVerifier IdentityVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Round-trips a nullable bytes field with both a real buffer and null; the
 * non-null result must be an equal but distinct buffer.
 */
@Test public void canSerializeNullableBytes() throws SerDeException, IOException {
String field="{ \"name\":\"nullableBytes\", \"type\":[\"null\", \"bytes\"] }";
ByteBuffer bb=ByteBuffer.wrap("easy as one two three".getBytes());
bb.rewind();
GenericRecord r=serializeAndDeserialize(field,"nullableBytes",bb);
Object result=r.get("nullableBytes");
assertNotSame(bb,result);
assertEquals(bb,result);
// null branch of the union
r=serializeAndDeserialize(field,"nullableBytes",null);
assertNull(r.get("nullableBytes"));
}
APIUtilityVerifier EqualityVerifier NullVerifier HybridVerifier
/** A nullable int field must round-trip both a concrete value and null. */
@Test public void canSerializeNullablePrimitiveTypes() throws SerDeException, IOException {
String field="{ \"name\":\"nullableint\", \"type\":[\"int\", \"null\"] }";
GenericRecord withValue=serializeAndDeserialize(field,"nullableint",42);
assertEquals(42,withValue.get("nullableint"));
GenericRecord withNull=serializeAndDeserialize(field,"nullableint",null);
assertNull(withNull.get("nullableint"));
}
APIUtilityVerifier InternalCallVerifier IdentityVerifier EqualityVerifier HybridVerifier
/**
 * Round-trips a nested record containing a nullable list field, once with the
 * list populated and once with it null; each result must be an equal but
 * distinct record instance.
 */
@Test public void canSerializeRecordsWithNullableComplexElements() throws SerDeException, IOException {
String field="{ \"name\":\"struct1\", \"type\":{\"type\":\"record\", " + "\"name\":\"struct1_name\", \"fields\": [\n" + "{ \"name\":\"sInt\", \"type\":\"int\" }, { \"name\""+ ":\"sBoolean\", \"type\":\"boolean\" }, { \"name\":\"nullableList\", \"type\":[\"null\", "+ "{ \"type\":\"array\", \"items\":\"int\"}] } ] } }";
Schema s=buildSchema(field);
GenericData.Record innerRecord=new GenericData.Record(s.getField("struct1").schema());
innerRecord.put("sInt",77);
innerRecord.put("sBoolean",false);
List intList=new ArrayList();
Collections.addAll(intList,1,2,3);
innerRecord.put("nullableList",intList);
GenericRecord r=serializeAndDeserialize(field,"struct1",innerRecord);
Object result=r.get("struct1");
assertNotSame(innerRecord,result);
assertEquals(innerRecord,result);
// second pass with the nullable list set to null
innerRecord.put("nullableList",null);
r=serializeAndDeserialize(field,"struct1",innerRecord);
result=r.get("struct1");
assertNotSame(innerRecord,result);
assertEquals(innerRecord,result);
}
APIUtilityVerifier EqualityVerifier
/** A map of boolean values must round-trip intact. */
@Test public void canSerializeMaps() throws SerDeException, IOException {
String field="{ \"name\":\"map1\", \"type\":{\"type\":\"map\", \"values\":\"boolean\"} }";
Map answers=new HashMap();
answers.put("yes",true);
answers.put("no",false);
GenericRecord roundTripped=serializeAndDeserialize(field,"map1",answers);
assertEquals(answers,roundTripped.get("map1"));
}
APIUtilityVerifier InternalCallVerifier IdentityVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Round-trips a nullable map field with both a populated map and null; the
 * non-null result must be an equal but distinct map instance.
 */
@Test public void canSerializeNullableMaps() throws SerDeException, IOException {
String field="{ \"name\":\"nullableMap\", \"type\": [\"null\", " + "{\"type\":\"map\", \"values\":\"boolean\"}] }";
Map m=new HashMap();
m.put("yes",true);
m.put("no",false);
GenericRecord r=serializeAndDeserialize(field,"nullableMap",m);
Object result=r.get("nullableMap");
assertNotSame(m,result);
assertEquals(m,result);
// null branch of the union
r=serializeAndDeserialize(field,"nullableMap",null);
assertNull(r.get("nullableMap"));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Builds a record containing a nested struct (int/boolean/string fields),
// runs it through AvroDeserializer then AvroSerializer, and verifies the
// re-serialized record has the same schema and nested field values.
@Test public void canSerializeStructs() throws SerDeException {
String field="{ \"name\":\"struct1\", \"type\":{\"type\":\"record\", " + "\"name\":\"struct1_name\", \"fields\": [\n" + "{ \"name\":\"sInt\", \"type\":\"int\" }, { \"name\""+ ":\"sBoolean\", \"type\":\"boolean\" }, { \"name\":\"sString\", \"type\":\"string\" } ] } }";
Schema s=buildSchema(field);
GenericData.Record innerRecord=new GenericData.Record(s.getField("struct1").schema());
innerRecord.put("sInt",77);
innerRecord.put("sBoolean",false);
innerRecord.put("sString","tedious");
GenericData.Record r=new GenericData.Record(s);
r.put("struct1",innerRecord);
AvroSerializer as=new AvroSerializer();
AvroDeserializer ad=new AvroDeserializer();
// Column names/types are derived from the Avro schema by the inspector generator.
AvroObjectInspectorGenerator aoig=new AvroObjectInspectorGenerator(s);
ObjectInspector oi=aoig.getObjectInspector();
List columnNames=aoig.getColumnNames();
List columnTypes=aoig.getColumnTypes();
AvroGenericRecordWritable agrw=new AvroGenericRecordWritable(r);
agrw.setFileSchema(r.getSchema());
// Deserialize to Hive's object representation, then serialize back to Avro.
Object obj=ad.deserialize(columnNames,columnTypes,agrw,s);
Writable result=as.serialize(obj,oi,columnNames,columnTypes,s);
assertTrue(result instanceof AvroGenericRecordWritable);
GenericRecord r2=((AvroGenericRecordWritable)result).getRecord();
assertEquals(s,r2.getSchema());
GenericRecord r3=(GenericRecord)r2.get("struct1");
assertEquals(77,r3.get("sInt"));
assertEquals(false,r3.get("sBoolean"));
assertEquals("tedious",r3.get("sString"));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Verifies serialize/deserialize works for reflection-derived schemas that
// contain a parent/child class cycle (AvroCycleParent -> AvroCycleChild).
// Only the schema round-trip is asserted, not the field values.
@Test public void canSerializeCyclesInSchema() throws SerDeException, IOException {
AvroCycleParent parent=new AvroCycleParent();
AvroCycleChild child=new AvroCycleChild();
parent.setChild(child);
// ReflectData.AllowNull makes every reflected field nullable in the schema.
Schema parentS=ReflectData.AllowNull.get().getSchema(AvroCycleParent.class);
GenericData.Record parentRec=new GenericData.Record(parentS);
Schema childS=ReflectData.AllowNull.get().getSchema(AvroCycleChild.class);
GenericData.Record childRec=new GenericData.Record(childS);
parentRec.put("child",childRec);
AvroSerializer as=new AvroSerializer();
AvroDeserializer ad=new AvroDeserializer();
AvroObjectInspectorGenerator aoig=new AvroObjectInspectorGenerator(parentS);
ObjectInspector oi=aoig.getObjectInspector();
List columnNames=aoig.getColumnNames();
List columnTypes=aoig.getColumnTypes();
AvroGenericRecordWritable agrw=Utils.serializeAndDeserializeRecord(parentRec);
Object obj=ad.deserialize(columnNames,columnTypes,agrw,parentS);
Writable result=as.serialize(obj,oi,columnNames,columnTypes,parentS);
assertTrue(result instanceof AvroGenericRecordWritable);
GenericRecord r2=((AvroGenericRecordWritable)result).getRecord();
assertEquals(parentS,r2.getSchema());
}
APIUtilityVerifier EqualityVerifier
// Round-trips a map whose values are decimal logical-type bytes (scale 4)
// and checks the map survives serialization unchanged.
@Test public void canSerializeMapOfDecimals() throws SerDeException, IOException {
Map m=new HashMap();
m.put("yes",AvroSerdeUtils.getBufferFromDecimal(HiveDecimal.create("3.14"),4));
m.put("no",AvroSerdeUtils.getBufferFromDecimal(HiveDecimal.create("6.2832732"),4));
String field="{ \"name\":\"map1\", \"type\":{\"type\":\"map\"," + " \"values\":{\"type\":\"bytes\", \"logicalType\":\"decimal\", \"precision\":5, \"scale\":4}} }";
GenericRecord r=serializeAndDeserialize(field,"map1",m);
assertEquals(m,r.get("map1"));
}
APIUtilityVerifier EqualityVerifier
// Round-trips an Avro array of decimal logical-type bytes values (scale 4)
// and checks the deserialized list equals the input.
@Test public void canSerializeListOfDecimals() throws SerDeException, IOException {
  List decimalBuffers=new ArrayList();
  for (String decimalText : new String[]{"3.1416","4.7779","0.2312","9.1000","5.5555"}) {
    decimalBuffers.add(AvroSerdeUtils.getBufferFromDecimal(HiveDecimal.create(decimalText),4));
  }
  String field="{ \"name\":\"list1\", \"type\":{\"type\":\"array\"," + " \"items\":{\"type\":\"bytes\", \"logicalType\":\"decimal\", \"precision\":5, \"scale\":4}} }";
  GenericRecord record=serializeAndDeserialize(field,"list1",decimalBuffers);
  assertEquals(decimalBuffers,record.get("list1"));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier IdentityVerifier EqualityVerifier NullVerifier HybridVerifier
// Round-trips a nullable record field twice: once with a populated inner
// record (result must be an equal but distinct instance) and once with null.
@Test public void canSerializeNullableRecords() throws SerDeException, IOException {
String field="{ \"name\":\"nullableStruct\", \"type\": [\"null\", {\"type\":\"record\", " + "\"name\":\"struct1_name\", \"fields\": [\n" + "{ \"name\":\"sInt\", \"type\":\"int\" }, "+ "{ \"name\":\"sBoolean\", \"type\":\"boolean\" }, "+ "{ \"name\":\"sString\", \"type\":\"string\" } ] }] }";
Schema s=buildSchema(field);
Schema nullable=s.getField("nullableStruct").schema();
assertTrue(AvroSerdeUtils.isNullableType(nullable));
// Build the inner record against the non-null branch of the union.
GenericData.Record innerRecord=new GenericData.Record(AvroSerdeUtils.getOtherTypeFromNullableType(nullable));
innerRecord.put("sInt",77);
innerRecord.put("sBoolean",false);
innerRecord.put("sString","tedious");
GenericRecord r=serializeAndDeserialize(field,"nullableStruct",innerRecord);
Object result=r.get("nullableStruct");
assertNotSame(innerRecord,result);
assertEquals(innerRecord,result);
r=serializeAndDeserialize(field,"nullableStruct",null);
assertNull(r.get("nullableStruct"));
}
APIUtilityVerifier InternalCallVerifier IdentityVerifier EqualityVerifier HybridVerifier
@Test public void canSerializeMapsWithNullableComplexValues() throws SerDeException, IOException {
String field="{ \"name\":\"mapWithNullableLists\", \"type\": " + "{\"type\":\"map\", \"values\": [\"null\", " + "{\"type\": \"array\", \"items\": \"int\"}]} }";
Map> m=new HashMap>();
List intList=new ArrayList();
Collections.addAll(intList,1,2,3);
m.put("list",intList);
m.put("null",null);
GenericRecord r=serializeAndDeserialize(field,"mapWithNullableLists",m);
Object result=r.get("mapWithNullableLists");
assertNotSame(m,result);
assertEquals(m,result);
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Round-trips a list of ints and verifies the deserialized value is an Avro
// GenericArray that also implements java.util.List and equals the input.
@Test public void canSerializeLists() throws SerDeException, IOException {
List intList=new ArrayList();
Collections.addAll(intList,1,2,3);
String field="{ \"name\":\"list1\", \"type\":{\"type\":\"array\", \"items\":\"int\"} }";
GenericRecord r=serializeAndDeserialize(field,"list1",intList);
final Object list1=r.get("list1");
Assert.assertTrue(list1 instanceof GenericArray);
Assert.assertTrue(list1 instanceof List);
assertEquals(intList,list1);
}
Class: org.apache.hadoop.hive.serde2.avro.TestGenericAvroRecordWritable APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Exercises the Hadoop Writable contract of AvroGenericRecordWritable:
// write() to a byte stream, readFields() into a second instance, then verify
// the deserialized record carries the original field values.
@Test public void writableContractIsImplementedCorrectly() throws IOException {
Schema schema=AvroSerdeUtils.getSchemaFor(schemaJSON);
GenericRecord gr=new GenericData.Record(schema);
gr.put("first","The");
gr.put("last","Doctor");
assertEquals("The",gr.get("first"));
assertEquals("Doctor",gr.get("last"));
AvroGenericRecordWritable garw=new AvroGenericRecordWritable(gr);
garw.setFileSchema(gr.getSchema());
garw.setRecordReaderID(new UID());
ByteArrayOutputStream baos=new ByteArrayOutputStream();
DataOutputStream daos=new DataOutputStream(baos);
garw.write(daos);
AvroGenericRecordWritable garw2=new AvroGenericRecordWritable(gr);
// Bug fix: the file schema must be set on garw2 — the instance that calls
// readFields() below. The original set it on garw a second time (copy-paste).
garw2.setFileSchema(gr.getSchema());
garw2.setRecordReaderID(new UID());
ByteArrayInputStream bais=new ByteArrayInputStream(baos.toByteArray());
DataInputStream dais=new DataInputStream(bais);
garw2.readFields(dais);
GenericRecord gr2=garw2.getRecord();
assertEquals("The",gr2.get("first").toString());
assertEquals("Doctor",gr2.get("last").toString());
}
Class: org.apache.hadoop.hive.serde2.avro.TestSchemaReEncoder APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Verifies AvroDeserializer.SchemaReEncoder can re-encode a record written
// with an older schema into an evolved schema, filling new fields from their
// declared defaults. Exercised twice: string default ("Hi!") and long default (42).
@Test public void schemasCanAddFields() throws SerDeException {
String original="{\n" + " \"namespace\": \"org.apache.hadoop.hive\",\n" + " \"name\": \"Line\",\n"+ " \"type\": \"record\",\n"+ " \"fields\": [\n"+ " {\n"+ " \"name\":\"text\",\n"+ " \"type\":\"string\"\n"+ " }\n"+ " ]\n"+ "}";
String evolved="{\n" + " \"namespace\": \"org.apache.hadoop.hive\",\n" + " \"name\": \"Line\",\n"+ " \"type\": \"record\",\n"+ " \"fields\": [\n"+ " {\n"+ " \"name\":\"text\",\n"+ " \"type\":\"string\"\n"+ " },\n"+ " {\n"+ " \"name\":\"new_kid\",\n"+ " \"type\":\"string\",\n"+ " \"default\":\"Hi!\"\n"+ " }\n"+ " ]\n"+ "}";
Schema originalSchema=AvroSerdeUtils.getSchemaFor(original);
Schema evolvedSchema=AvroSerdeUtils.getSchemaFor(evolved);
GenericRecord record=new GenericData.Record(originalSchema);
record.put("text","it is a far better thing I do, yadda, yadda");
assertTrue(GenericData.get().validate(originalSchema,record));
AvroDeserializer.SchemaReEncoder schemaReEncoder=new AvroDeserializer.SchemaReEncoder(record.getSchema(),evolvedSchema);
GenericRecord r2=schemaReEncoder.reencode(record);
assertTrue(GenericData.get().validate(evolvedSchema,r2));
// The re-encoded record must pick up the evolved field's default value.
assertEquals("Hi!",r2.get("new_kid").toString());
String original2="{\n" + " \"namespace\": \"somebody.else\",\n" + " \"name\": \"something_else\",\n"+ " \"type\": \"record\",\n"+ " \"fields\": [\n"+ " {\n"+ " \"name\":\"a\",\n"+ " \"type\":\"int\"\n"+ " }\n"+ " ]\n"+ "}";
String evolved2="{\n" + " \"namespace\": \"somebody.else\",\n" + " \"name\": \"something_else\",\n"+ " \"type\": \"record\",\n"+ " \"fields\": [\n"+ " {\n"+ " \"name\":\"a\",\n"+ " \"type\":\"int\"\n"+ " },\n"+ " {\n"+ " \"name\":\"b\",\n"+ " \"type\":\"long\",\n"+ " \"default\":42\n"+ " }\n"+ " ]\n"+ "}";
Schema originalSchema2=AvroSerdeUtils.getSchemaFor(original2);
Schema evolvedSchema2=AvroSerdeUtils.getSchemaFor(evolved2);
record=new GenericData.Record(originalSchema2);
record.put("a",19);
assertTrue(GenericData.get().validate(originalSchema2,record));
schemaReEncoder=new AvroDeserializer.SchemaReEncoder(record.getSchema(),evolvedSchema2);
r2=schemaReEncoder.reencode(record);
assertTrue(GenericData.get().validate(evolvedSchema2,r2));
// Idiom fix: use uppercase 'L' for the long literal (42l reads as 421).
assertEquals(42L,r2.get("b"));
}
Class: org.apache.hadoop.hive.serde2.avro.TestThatEvolvedSchemasActAsWeWant APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Writes a record with schema v0 and reads it back expecting evolved schema v1,
// verifying Avro schema resolution fills the new field's default and that the
// resulting record reports the READER (expected) schema, not the writer schema.
@Test public void resolvedSchemasShouldReturnReaderSchema() throws IOException {
String v0="{\n" + " \"namespace\": \"org.apache.hadoop.hive\",\n" + " \"name\": \"SomeStuff\",\n"+ " \"type\": \"record\",\n"+ " \"fields\": [\n"+ " {\n"+ " \"name\":\"v0\",\n"+ " \"type\":\"string\"\n"+ " }\n"+ " ]\n"+ "}";
String v1="{\n" + " \"namespace\": \"org.apache.hadoop.hive\",\n" + " \"name\": \"SomeStuff\",\n"+ " \"type\": \"record\",\n"+ " \"fields\": [\n"+ " {\n"+ " \"name\":\"v0\",\n"+ " \"type\":\"string\"\n"+ " },\n"+ " {\n"+ " \"name\":\"v1\",\n"+ " \"type\":\"string\",\n"+ " \"default\":\"v1_default\""+ " }\n"+ " ]\n"+ "}";
Schema[] schemas={AvroSerdeUtils.getSchemaFor(v0),AvroSerdeUtils.getSchemaFor(v1)};
GenericRecord record=new GenericData.Record(schemas[0]);
record.put("v0","v0 value");
assertTrue(GenericData.get().validate(schemas[0],record));
// Fix: type arguments had been stripped; with raw types DataFileStream.next()
// returns Object and the GenericRecord assignment below does not compile.
GenericDatumWriter<GenericRecord> gdw=new GenericDatumWriter<GenericRecord>(schemas[0]);
DataFileWriter<GenericRecord> dfw=new DataFileWriter<GenericRecord>(gdw);
ByteArrayOutputStream baos=new ByteArrayOutputStream();
dfw.create(schemas[0],baos);
dfw.append(record);
dfw.close();
ByteArrayInputStream bais=new ByteArrayInputStream(baos.toByteArray());
GenericDatumReader<GenericRecord> gdr=new GenericDatumReader<GenericRecord>();
gdr.setExpected(schemas[1]);
DataFileStream<GenericRecord> dfs=new DataFileStream<GenericRecord>(bais,gdr);
assertTrue(dfs.hasNext());
GenericRecord next=dfs.next();
assertEquals("v0 value",next.get("v0").toString());
// The new field is populated from its schema default.
assertEquals("v1_default",next.get("v1").toString());
assertEquals(schemas[1],next.getSchema());
}
Class: org.apache.hadoop.hive.serde2.avro.TestTypeInfoToSchema APIUtilityVerifier EqualityVerifier
// Hive BOOLEAN should map to the primitive Avro "boolean" schema.
@Test public void createAvroBooleanSchema(){
  final String expected=genSchema("\"boolean\"");
  Assert.assertEquals("Test for boolean's avro schema failed",expected,getAvroSchemaString(BOOLEAN));
}
APIUtilityVerifier EqualityVerifier
// Hive CHAR maps to an Avro string carrying the "char" logical type and
// a maxLength attribute equal to the declared char length.
@Test public void createAvroCharSchema(){
  final String charSchema="{" + "\"type\":\"string\"," + "\"logicalType\":\"char\","+ "\"maxLength\":" + CHAR_LEN + "}";
  Assert.assertEquals("Test for char's avro schema failed",genSchema(charSchema),getAvroSchemaString(CHAR));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Builds a 14-field struct covering every primitive Hive type plus decimal,
// date and void, and compares its generated Avro schema against the
// avro-struct.avsc resource fixture.
@Test public void createAvroStructSchema() throws IOException {
StructTypeInfo structTypeInfo=new StructTypeInfo();
ArrayList names=new ArrayList();
// Idiom: replace 14 repeated add() calls with a single Collections.addAll.
Collections.addAll(names,"field1","field2","field3","field4","field5","field6","field7","field8","field9","field10","field11","field12","field13","field14");
structTypeInfo.setAllStructFieldNames(names);
ArrayList typeInfos=new ArrayList();
// Field types are positional: field1 is STRING, field2 CHAR, ... field14 VOID.
Collections.addAll(typeInfos,STRING,CHAR,VARCHAR,BINARY,BYTE,SHORT,INT,LONG,FLOAT,DOUBLE,BOOLEAN,DECIMAL,DATE,VOID);
structTypeInfo.setAllStructFieldTypeInfos(typeInfos);
LOGGER.info("structTypeInfo is " + structTypeInfo);
// NOTE(review): the stream from openStream() is never closed; harmless in a
// short-lived test but a try-with-resources would be cleaner.
final String specificSchema=IOUtils.toString(Resources.getResource("avro-struct.avsc").openStream()).replace(lineSeparator,"");
String expectedSchema=genSchema(specificSchema);
Assert.assertEquals("Test for struct's avro schema failed",expectedSchema,getAvroSchemaString(structTypeInfo));
}
APIUtilityVerifier EqualityVerifier
// Hive DOUBLE should map to the primitive Avro "double" schema.
@Test public void createAvroDoubleSchema(){
  final String expected=genSchema("\"double\"");
  Assert.assertEquals("Test for double's avro schema failed",expected,getAvroSchemaString(DOUBLE));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// A Hive union that already contains VOID should produce an Avro union with
// exactly one leading "null" branch (no extra null added by the converter).
@Test public void createAvroUnionSchemaWithNull(){
UnionTypeInfo unionTypeInfo=new UnionTypeInfo();
unionTypeInfo.setAllUnionObjectTypeInfos(Arrays.asList(INT,FLOAT,STRING,VOID));
final String specificSchema=Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.NULL),Schema.create(Schema.Type.INT),Schema.create(Schema.Type.FLOAT),Schema.create(Schema.Type.STRING))).toString();
String expectedSchema=genSchemaWithoutNull(specificSchema);
Assert.assertEquals("Test for union's avro schema failed",expectedSchema,getAvroSchemaString(unionTypeInfo));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// A Hive map<string,int> becomes an Avro map whose value type is a nullable
// union ["null","int"] — the converter makes map values nullable.
@Test public void createAvroMapSchema(){
MapTypeInfo mapTypeInfo=new MapTypeInfo();
mapTypeInfo.setMapKeyTypeInfo(STRING);
mapTypeInfo.setMapValueTypeInfo(INT);
final String specificSchema=Schema.createMap(Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.NULL),Schema.create(Schema.Type.INT)))).toString();
String expectedSchema=genSchema(specificSchema);
Assert.assertEquals("Test for map's avro schema failed",expectedSchema,getAvroSchemaString(mapTypeInfo));
}
APIUtilityVerifier EqualityVerifier
// Hive DATE maps to an Avro int carrying the "date" logical type.
@Test public void createAvroDateSchema(){
  final String dateSchema="{" + "\"type\":\"int\"," + "\"logicalType\":\"date\"}";
  Assert.assertEquals("Test for date in avro schema failed",genSchema(dateSchema),getAvroSchemaString(DATE));
}
APIUtilityVerifier EqualityVerifier
// Hive VARCHAR maps to an Avro string with the "varchar" logical type.
// NOTE(review): reuses CHAR_LEN as the varchar maxLength — presumably the
// fixture declares both with the same length; confirm against the test setup.
@Test public void createAvroVarcharSchema(){
final String specificSchema="{" + "\"type\":\"string\"," + "\"logicalType\":\"varchar\","+ "\"maxLength\":" + CHAR_LEN + "}";
String expectedSchema=genSchema(specificSchema);
Assert.assertEquals("Test for varchar's avro schema failed",expectedSchema,getAvroSchemaString(VARCHAR));
}
APIUtilityVerifier EqualityVerifier
// Hive BINARY should map to the primitive Avro "bytes" schema.
@Test public void createAvroBinarySchema(){
  final String expected=genSchema("\"bytes\"");
  Assert.assertEquals("Test for binary's avro schema failed",expected,getAvroSchemaString(BINARY));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Degenerate case: a Hive union containing only VOID should yield an Avro
// union with a single "null" branch.
@Test public void createAvroUnionSchemaOfNull(){
UnionTypeInfo unionTypeInfo=new UnionTypeInfo();
unionTypeInfo.setAllUnionObjectTypeInfos(Arrays.asList(VOID));
final String specificSchema=Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.NULL))).toString();
String expectedSchema=genSchemaWithoutNull(specificSchema);
Assert.assertEquals("Test for union's avro schema failed",expectedSchema,getAvroSchemaString(unionTypeInfo));
}
APIUtilityVerifier EqualityVerifier
// Hive BYTE (tinyint) has no direct Avro counterpart and is widened to the
// Avro "int" schema.
@Test public void createAvroBytesSchema(){
final String specificSchema="\"int\"";
String expectedSchema=genSchema(specificSchema);
// Message fix: this asserts the BYTE mapping, so say "byte's", not "bytes's".
Assert.assertEquals("Test for byte's avro schema failed",expectedSchema,getAvroSchemaString(BYTE));
}
APIUtilityVerifier EqualityVerifier
// Hive SHORT (smallint) is widened to the Avro "int" schema.
@Test public void createAvroShortSchema(){
  final String expected=genSchema("\"int\"");
  Assert.assertEquals("Test for short's avro schema failed",expected,getAvroSchemaString(SHORT));
}
APIUtilityVerifier EqualityVerifier
// Hive DECIMAL maps to Avro bytes with the "decimal" logical type and the
// precision/scale taken from the type (PRECISION/SCALE fixture constants).
@Test public void createAvroDecimalSchema(){
final String specificSchema="{" + "\"type\":\"bytes\"," + "\"logicalType\":\"decimal\","+ "\"precision\":" + PRECISION + ","+ "\"scale\":"+ SCALE+ "}";
String expectedSchema=genSchema(specificSchema);
Assert.assertEquals("Test for decimal's avro schema failed",expectedSchema,getAvroSchemaString(DECIMAL));
}
APIUtilityVerifier EqualityVerifier
// Hive LONG (bigint) should map to the primitive Avro "long" schema.
@Test public void createAvroLongSchema(){
  final String expected=genSchema("\"long\"");
  Assert.assertEquals("Test for long's avro schema failed",expected,getAvroSchemaString(LONG));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Builds a struct nested inside another struct and compares the generated
// Avro schema against the avro-nested-struct.avsc resource fixture.
@Test public void createAvroNestedStructSchema() throws IOException {
StructTypeInfo structTypeInfo=new StructTypeInfo();
ArrayList names=new ArrayList();
names.add("field1");
names.add("field2");
structTypeInfo.setAllStructFieldNames(names);
ArrayList typeInfos=new ArrayList();
typeInfos.add(STRING);
typeInfos.add(INT);
structTypeInfo.setAllStructFieldTypeInfos(typeInfos);
// Outer struct: superfield1 is a string, superfield2 is the inner struct.
StructTypeInfo superStructTypeInfo=new StructTypeInfo();
ArrayList superNames=new ArrayList();
superNames.add("superfield1");
superNames.add("superfield2");
superStructTypeInfo.setAllStructFieldNames(superNames);
ArrayList superTypeInfos=new ArrayList();
superTypeInfos.add(STRING);
superTypeInfos.add(structTypeInfo);
superStructTypeInfo.setAllStructFieldTypeInfos(superTypeInfos);
final String specificSchema=IOUtils.toString(Resources.getResource("avro-nested-struct.avsc").openStream()).replace(lineSeparator,"");
String expectedSchema=genSchema(specificSchema);
Assert.assertEquals("Test for nested struct's avro schema failed",expectedSchema,getAvroSchemaString(superStructTypeInfo));
}
APIUtilityVerifier EqualityVerifier
// Hive TIMESTAMP maps to an Avro long with the "timestamp-millis" logical type.
@Test public void createAvroTimestampSchema(){
  final String timestampSchema="{" + "\"type\":\"long\"," + "\"logicalType\":\"timestamp-millis\"}";
  Assert.assertEquals("Test for timestamp in avro schema failed",genSchema(timestampSchema),getAvroSchemaString(TIMESTAMP));
}
APIUtilityVerifier EqualityVerifier
// Hive STRING should map to the primitive Avro "string" schema.
@Test public void createAvroStringSchema(){
  final String expected=genSchema("\"string\"");
  Assert.assertEquals("Test for string's avro schema failed",expected,getAvroSchemaString(STRING));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// A Hive union without VOID still gets a leading "null" branch in the Avro
// union — the converter always makes union types nullable.
@Test public void createAvroUnionSchema(){
UnionTypeInfo unionTypeInfo=new UnionTypeInfo();
unionTypeInfo.setAllUnionObjectTypeInfos(Arrays.asList(INT,FLOAT,STRING));
final String specificSchema=Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.NULL),Schema.create(Schema.Type.INT),Schema.create(Schema.Type.FLOAT),Schema.create(Schema.Type.STRING))).toString();
String expectedSchema=genSchemaWithoutNull(specificSchema);
Assert.assertEquals("Test for union's avro schema failed",expectedSchema,getAvroSchemaString(unionTypeInfo));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// A single-member Hive union still becomes a two-branch Avro union:
// ["null","string"], since the converter adds the null branch.
@Test public void createAvroUnionSchemaOfOne(){
UnionTypeInfo unionTypeInfo=new UnionTypeInfo();
unionTypeInfo.setAllUnionObjectTypeInfos(Arrays.asList(STRING));
final String specificSchema=Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.NULL),Schema.create(Schema.Type.STRING))).toString();
String expectedSchema=genSchemaWithoutNull(specificSchema);
Assert.assertEquals("Test for union's avro schema failed",expectedSchema,getAvroSchemaString(unionTypeInfo));
}
APIUtilityVerifier EqualityVerifier
// Hive FLOAT should map to the primitive Avro "float" schema.
@Test public void createAvroFloatSchema(){
  final String expected=genSchema("\"float\"");
  Assert.assertEquals("Test for float's avro schema failed",expected,getAvroSchemaString(FLOAT));
}
APIUtilityVerifier EqualityVerifier
// Hive VOID maps to the Avro "null" schema; uses the without-null template
// since the type itself already is null.
@Test public void createAvroVoidSchema(){
final String specificSchema="\"null\"";
String expectedSchema=genSchemaWithoutNull(specificSchema);
Assert.assertEquals("Test for void's avro schema failed",expectedSchema,getAvroSchemaString(VOID));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// A Hive array<string> becomes an Avro array whose element type is the
// nullable union ["null","string"].
@Test public void createAvroListSchema(){
ListTypeInfo listTypeInfo=new ListTypeInfo();
listTypeInfo.setListElementTypeInfo(STRING);
final String specificSchema=Schema.createArray(Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.NULL),Schema.create(Schema.Type.STRING)))).toString();
String expectedSchema=genSchema(specificSchema);
Assert.assertEquals("Test for list's avro schema failed",expectedSchema,getAvroSchemaString(listTypeInfo));
}
APIUtilityVerifier EqualityVerifier
// Hive INT should map to the primitive Avro "int" schema.
@Test public void createAvroIntSchema(){
  final String expected=genSchema("\"int\"");
  Assert.assertEquals("Test for int's avro schema failed",expected,getAvroSchemaString(INT));
}
Class: org.apache.hadoop.hive.serde2.columnar.TestBytesRefArrayWritable InternalCallVerifier EqualityVerifier
// Checks two properties of the compareTo contract on the shared left/right
// fixtures: antisymmetry (a.compareTo(b) == -b.compareTo(a)) and reflexivity.
@Test public void testCompareTo(){
int a=left.compareTo(right);
int b=right.compareTo(left);
Assert.assertEquals("a.compareTo(b) should be equal to -b.compareTo(a)",a,-b);
Assert.assertEquals("An object must be equal to itself",0,left.compareTo(left));
}
Class: org.apache.hadoop.hive.serde2.io.TestDateWritable InternalCallVerifier EqualityVerifier
// All three DateWritable constructors (from Date, from another DateWritable,
// from a day count) must produce equal writables that return the source Date.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testConstructor(){
Date date=Date.valueOf(getRandomDateString());
DateWritable dw1=new DateWritable(date);
DateWritable dw2=new DateWritable(dw1);
DateWritable dw3=new DateWritable(dw1.getDays());
assertEquals(dw1,dw1);
assertEquals(dw1,dw2);
assertEquals(dw2,dw3);
assertEquals(date,dw1.get());
assertEquals(date,dw2.get());
assertEquals(date,dw3.get());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Verifies equals() and compareTo() agree for DateWritable: equal dates
// compare as 0 and are symmetric; distinct dates are unequal and nonzero.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testComparison(){
Date date1=Date.valueOf(getRandomDateString());
Date date2=Date.valueOf(getRandomDateString());
// Re-draw date2 until it differs from date1 so the inequality checks hold.
while (date1.equals(date2)) {
date2=Date.valueOf(getRandomDateString());
}
DateWritable dw1=new DateWritable(date1);
DateWritable dw2=new DateWritable(date2);
DateWritable dw3=new DateWritable(date1);
assertTrue("Dates should be equal",dw1.equals(dw1));
assertTrue("Dates should be equal",dw1.equals(dw3));
assertTrue("Dates should be equal",dw3.equals(dw1));
assertEquals("Dates should be equal",0,dw1.compareTo(dw1));
assertEquals("Dates should be equal",0,dw1.compareTo(dw3));
assertEquals("Dates should be equal",0,dw3.compareTo(dw1));
assertFalse("Dates not should be equal",dw1.equals(dw2));
assertFalse("Dates not should be equal",dw2.equals(dw1));
assertTrue("Dates not should be equal",0 != dw1.compareTo(dw2));
assertTrue("Dates not should be equal",0 != dw2.compareTo(dw1));
}
InternalCallVerifier EqualityVerifier
// Round-trips a DateWritable through write()/readFields() over a byte stream
// and checks the deserialized instance equals the original.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testWritableMethods() throws Throwable {
DateWritable dw1=new DateWritable(Date.valueOf(getRandomDateString()));
DateWritable dw2=new DateWritable();
ByteArrayOutputStream byteStream=new ByteArrayOutputStream();
DataOutput out=new DataOutputStream(byteStream);
dw1.write(out);
dw2.readFields(new DataInputStream(new ByteArrayInputStream(byteStream.toByteArray())));
assertEquals("Dates should be equal",dw1,dw2);
}
InternalCallVerifier EqualityVerifier
// Exercises every DateWritable accessor/mutator: get(), getTimeInSeconds(),
// getDays() against known epoch offsets, and the three set() overloads
// (day count, Date, other DateWritable).
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testGettersSetters(){
Date date1=Date.valueOf(getRandomDateString());
Date date2=Date.valueOf(getRandomDateString());
Date date3=Date.valueOf(getRandomDateString());
DateWritable dw1=new DateWritable(date1);
DateWritable dw2=new DateWritable(date2);
DateWritable dw3=new DateWritable(date3);
DateWritable dw4=new DateWritable();
assertEquals(date1,dw1.get());
assertEquals(date1.getTime() / 1000,dw1.getTimeInSeconds());
// 1970-01-02 is day 1 after the epoch; 1971-01-01 is day 365.
dw4.set(Date.valueOf("1970-01-02"));
assertEquals(1,dw4.getDays());
dw4.set(Date.valueOf("1971-01-01"));
assertEquals(365,dw4.getDays());
dw4.set(dw1.getDays());
assertEquals(dw1,dw4);
dw4.set(dw2.get());
assertEquals(dw2,dw4);
dw4.set(dw3);
assertEquals(dw3,dw4);
}
APIUtilityVerifier EqualityVerifier
// Date.valueOf followed by toString must be the identity on a well-formed
// yyyy-mm-dd date string.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testDateValueOf(){
  final String expected=getRandomDateString();
  assertEquals(expected,Date.valueOf(expected).toString());
}
APIUtilityVerifier EqualityVerifier NullVerifier HybridVerifier
// Runs DateTestCallable under a variety of default time zones (including
// half-hour offsets and DST-observing zones); the callable returns null on
// success or a description of the failing date. The default zone is always
// restored in the finally block.
@Test public void testDaylightSavingsTime() throws InterruptedException, ExecutionException {
String[] timeZones={"GMT","UTC","America/Godthab","America/Los_Angeles","Asia/Jerusalem","Australia/Melbourne","Europe/London","America/St_Johns","Asia/Tehran"};
for ( String timeZone : timeZones) {
TimeZone previousDefault=TimeZone.getDefault();
TimeZone.setDefault(TimeZone.getTimeZone(timeZone));
assertEquals("Default timezone should now be " + timeZone,timeZone,TimeZone.getDefault().getID());
ExecutorService threadPool=Executors.newFixedThreadPool(1);
try {
// Fix: the type argument had been stripped; with a raw Future, get()
// returns Object and the String assignment below does not compile.
Future<String> future=threadPool.submit(new DateTestCallable());
String result=future.get();
assertNull("Failed at timezone " + timeZone + ", date "+ result,result);
}
finally {
threadPool.shutdown();
TimeZone.setDefault(previousDefault);
}
}
}
Class: org.apache.hadoop.hive.serde2.io.TestHiveCharWritable BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Verifies HiveCharWritable equality/compareTo semantics: identical values
// are equal; different values are not; trailing pad spaces are insignificant
// (char semantics) but leading spaces are significant.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testComparison() throws Exception {
HiveCharWritable hcw1=new HiveCharWritable();
HiveCharWritable hcw2=new HiveCharWritable();
hcw1.set("abcd",4);
hcw2.set("abcd",4);
assertEquals(hcw1,hcw2);
assertEquals(hcw2,hcw1);
assertEquals(0,hcw1.compareTo(hcw2));
assertEquals(0,hcw2.compareTo(hcw1));
hcw1.set("abcd",4);
hcw2.set("abc",4);
assertFalse(hcw1.equals(hcw2));
assertFalse(hcw2.equals(hcw1));
assertFalse(0 == hcw1.compareTo(hcw2));
assertFalse(0 == hcw2.compareTo(hcw1));
// Trailing spaces beyond the value are padding and must not affect equality.
hcw1.set("abcd ",10);
hcw2.set("abcd",4);
assertEquals("abcd      ",hcw1.toString());
assertEquals(hcw1,hcw2);
assertEquals(hcw2,hcw1);
assertEquals(0,hcw1.compareTo(hcw2));
assertEquals(0,hcw2.compareTo(hcw1));
// Leading spaces are part of the value and make the chars unequal.
hcw1.set(" abcd",5);
hcw2.set("abcd",5);
assertFalse(hcw1.equals(hcw2));
assertFalse(hcw2.equals(hcw1));
assertFalse(0 == hcw1.compareTo(hcw2));
assertFalse(0 == hcw2.compareTo(hcw1));
}
InternalCallVerifier EqualityVerifier
// enforceMaxLength pads the value out to a larger max length and truncates
// it down to a smaller one.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testEnforceMaxLength(){
HiveCharWritable hcw1=new HiveCharWritable();
hcw1.set("abcdefghij",10);
assertEquals("abcdefghij",hcw1.toString());
hcw1.enforceMaxLength(12);
assertEquals("abcdefghij  ",hcw1.toString());
hcw1.enforceMaxLength(5);
assertEquals("abcde",hcw1.toString());
}
InternalCallVerifier EqualityVerifier
// Exercises every HiveCharWritable.set() overload — from HiveChar, from
// String, from another writable, each with and without an explicit length —
// verifying padding to the target length and truncation to a shorter one.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testSet() throws Exception {
HiveCharWritable hcw1=new HiveCharWritable();
HiveChar hc1=new HiveChar("abcd",8);
hcw1.set(hc1);
assertEquals("abcd    ",hcw1.toString());
hcw1.set(hc1,10);
assertEquals("abcd      ",hcw1.toString());
hcw1.set(hc1,2);
assertEquals("ab",hcw1.toString());
hcw1.set("abcd");
assertEquals("abcd",hcw1.toString());
// Trailing spaces in the source string are preserved by the no-length set().
hcw1.set("abcd  ");
assertEquals("abcd  ",hcw1.toString());
hcw1.set("abcd",10);
assertEquals("abcd      ",hcw1.toString());
hcw1.set("abcd",2);
assertEquals("ab",hcw1.toString());
HiveCharWritable hcw2=new HiveCharWritable(hc1);
hcw1.set(hcw2);
assertEquals("abcd    ",hcw1.toString());
hcw1.set(hcw2,10);
assertEquals("abcd      ",hcw1.toString());
assertEquals("abcd      ",hcw1.getTextValue().toString());
hcw1.set(hcw2,2);
assertEquals("ab",hcw1.toString());
assertEquals("ab",hcw1.getTextValue().toString());
}
InternalCallVerifier EqualityVerifier
// Both HiveCharWritable constructors (from HiveChar and from another
// writable) must preserve the space-padded char value.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testConstructor() throws Exception {
HiveCharWritable hcw1=new HiveCharWritable(new HiveChar("abc",5));
assertEquals("abc  ",hcw1.toString());
HiveCharWritable hcw2=new HiveCharWritable(hcw1);
assertEquals("abc  ",hcw2.toString());
}
InternalCallVerifier EqualityVerifier
// getCharacterLength counts only the significant characters; the trailing
// pad spaces added by set("abcd",10) are excluded.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testGetCharacterLength() throws Exception {
  final HiveCharWritable writable=new HiveCharWritable();
  writable.set("abcd",10);
  assertEquals(4,writable.getCharacterLength());
}
InternalCallVerifier EqualityVerifier
// getHiveChar returns the value space-padded out to the declared length.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testGetHiveChar() throws Exception {
  final HiveCharWritable writable=new HiveCharWritable();
  writable.set("abcd",10);
  assertEquals("abcd      ",writable.getHiveChar().toString());
}
Class: org.apache.hadoop.hive.serde2.io.TestHiveDecimalWritable InternalCallVerifier EqualityVerifier
// Regression test for HIVE-6594: sums two Decimal128 values at scale 14,
// fast-serializes the result into a HiveDecimalWritable, and checks the
// decoded BigDecimal matches the Decimal128 sum (ignoring trailing zeros).
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testHive6594(){
Decimal128FastBuffer scratch=new Decimal128FastBuffer();
String[] vs=new String[]{"-4033.445769230769","6984454.211097692"};
Decimal128 d=new Decimal128(0L,(short)14);
for ( String s : vs) {
Decimal128 p=new Decimal128(s,(short)14);
// Cleanup: the original had a redundant double cast ((short)(short)14).
d.addDestructive(p,(short)14);
}
int bufferUsed=d.fastSerializeForHiveDecimal(scratch);
HiveDecimalWritable hdw=new HiveDecimalWritable();
hdw.set(scratch.getBytes(bufferUsed),d.getScale());
HiveDecimal hd=hdw.getHiveDecimal();
BigDecimal readValue=hd.bigDecimalValue();
// stripTrailingZeros on both sides so representation differences don't fail the test.
assertEquals(d.toBigDecimal().stripTrailingZeros(),readValue.stripTrailingZeros());
}
Class: org.apache.hadoop.hive.serde2.io.TestHiveIntervalDayTimeWritable InternalCallVerifier EqualityVerifier
// set(days,hours,minutes,seconds,nanos) must be readable back field-by-field
// through the HiveIntervalDayTime accessors.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testGettersSetters() throws Exception {
HiveIntervalDayTimeWritable hiw1=new HiveIntervalDayTimeWritable();
hiw1.set(3,4,5,6,7);
HiveIntervalDayTime hi1=hiw1.getHiveIntervalDayTime();
assertEquals(3,hi1.getDays());
assertEquals(4,hi1.getHours());
assertEquals(5,hi1.getMinutes());
assertEquals(6,hi1.getSeconds());
assertEquals(7,hi1.getNanos());
}
EqualityVerifier
// The copy constructor must produce a writable equal to one constructed
// directly from the same HiveIntervalDayTime value.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testConstructor() throws Exception {
HiveIntervalDayTime hi1=HiveIntervalDayTime.valueOf("3 4:5:6.12345");
HiveIntervalDayTimeWritable hiw1=new HiveIntervalDayTimeWritable(hi1);
HiveIntervalDayTimeWritable hiw2=new HiveIntervalDayTimeWritable(hiw1);
assertEquals(hiw1,hiw2);
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Checks equals/compareTo/hashCode of HiveIntervalDayTimeWritable against a
// fixed reference value, bumping and lowering one interval field at a time.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testComparison() throws Exception {
HiveIntervalDayTimeWritable hiw0=new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("2 2:2:2.22222"));
HiveIntervalDayTimeWritable hiw1=new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("2 2:2:2.22222"));
HiveIntervalDayTimeWritable hiw2=new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("3 2:2:2.22222"));
assertTrue(hiw1 + " equals " + hiw1,hiw1.equals(hiw1));
assertTrue(hiw1 + " equals " + hiw0,hiw1.equals(hiw0));
assertFalse(hiw1 + " equals " + hiw2,hiw1.equals(hiw2));
assertTrue(hiw1 + " compare " + hiw1,0 == hiw1.compareTo(hiw1));
assertTrue(hiw1 + " compare " + hiw0,0 == hiw1.compareTo(hiw0));
assertTrue(hiw1 + " compare " + hiw2,0 > hiw1.compareTo(hiw2));
// One entry per field (days, hours, minutes, seconds, nanos): the first array
// raises that field above the reference, the second lowers it below.
String[] above={"3 2:2:2.22222","2 3:2:2.22222","2 2:3:2.22222","2 2:2:3.22222","2 2:2:2.33333"};
String[] below={"1 2:2:2.22222","2 1:2:2.22222","2 2:1:2.22222","2 2:2:1.22222","2 2:2:2.11111"};
for (int field=0; field < above.length; field++) {
hiw2=new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf(above[field]));
assertTrue(hiw1 + " compare " + hiw2,0 > hiw1.compareTo(hiw2));
hiw2=new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf(below[field]));
assertTrue(hiw1 + " compare " + hiw2,0 < hiw1.compareTo(hiw2));
}
// Equal values must hash equally (equals/hashCode contract).
assertEquals(hiw0.hashCode(),hiw1.hashCode());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Round-trips one writable through write()/readFields() and checks that the
// deserialized copy equals the original.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testWritableMethods() throws Exception {
HiveIntervalDayTimeWritable source=new HiveIntervalDayTimeWritable();
HiveIntervalDayTimeWritable target=new HiveIntervalDayTimeWritable();
source.set(3,4,5,6,7);
target.set(5,4,3,2,1);
// Sanity check: the two values start out different.
assertFalse(source.equals(target));
ByteArrayOutputStream buffer=new ByteArrayOutputStream();
DataOutput dataOut=new DataOutputStream(buffer);
source.write(dataOut);
target.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));
assertEquals(source,target);
}
Class: org.apache.hadoop.hive.serde2.io.TestHiveIntervalYearMonthWritable BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Checks equals/compareTo/hashCode of HiveIntervalYearMonthWritable against a
// fixed reference value, varying the year and month fields.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testComparison() throws Exception {
HiveIntervalYearMonthWritable hiw0=new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2"));
HiveIntervalYearMonthWritable hiw1=new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2"));
HiveIntervalYearMonthWritable hiw2=new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("3-2"));
assertTrue(hiw1 + " equals " + hiw1,hiw1.equals(hiw1));
assertTrue(hiw1 + " equals " + hiw0,hiw1.equals(hiw0));
assertFalse(hiw1 + " equals " + hiw2,hiw1.equals(hiw2));
assertTrue(hiw1 + " compare " + hiw1,0 == hiw1.compareTo(hiw1));
assertTrue(hiw1 + " compare " + hiw0,0 == hiw1.compareTo(hiw0));
assertTrue(hiw1 + " compare " + hiw2,0 > hiw1.compareTo(hiw2));
// Probe intervals paired with whether the reference sorts after them.
String[] probes={"1-2","2-3","2-1"};
boolean[] referenceIsGreater={true,false,true};
for (int i=0; i < probes.length; i++) {
hiw2=new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf(probes[i]));
int cmp=hiw1.compareTo(hiw2);
assertTrue(hiw1 + " compare " + hiw2,referenceIsGreater[i] ? 0 < cmp : 0 > cmp);
}
// Equal values must hash equally (equals/hashCode contract).
assertEquals(hiw0.hashCode(),hiw1.hashCode());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Round-trips one writable through write()/readFields() and checks that the
// deserialized copy equals the original.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testWritableMethods() throws Exception {
HiveIntervalYearMonthWritable source=new HiveIntervalYearMonthWritable();
HiveIntervalYearMonthWritable target=new HiveIntervalYearMonthWritable();
source.set(1,2);
target.set(7,6);
// Sanity check: the two values start out different.
assertFalse(source.equals(target));
ByteArrayOutputStream buffer=new ByteArrayOutputStream();
DataOutput dataOut=new DataOutputStream(buffer);
source.write(dataOut);
target.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));
assertEquals(source,target);
}
EqualityVerifier
// A writable copy-constructed from another writable must equal one built
// directly from the underlying HiveIntervalYearMonth value.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testConstructor() throws Exception {
HiveIntervalYearMonth interval=HiveIntervalYearMonth.valueOf("1-2");
HiveIntervalYearMonthWritable fromInterval=new HiveIntervalYearMonthWritable(interval);
HiveIntervalYearMonthWritable fromWritable=new HiveIntervalYearMonthWritable(fromInterval);
assertEquals(fromInterval,fromWritable);
}
InternalCallVerifier EqualityVerifier
// Exercises all three set(...) overloads (ints, interval object, writable)
// and verifies each is readable back through getHiveIntervalYearMonth().
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testGettersSetters() throws Exception {
HiveIntervalYearMonthWritable writable=new HiveIntervalYearMonthWritable();
writable.set(1,2);
HiveIntervalYearMonth interval=writable.getHiveIntervalYearMonth();
assertEquals(1,interval.getYears());
assertEquals(2,interval.getMonths());
writable.set(new HiveIntervalYearMonth(3,4));
interval=writable.getHiveIntervalYearMonth();
assertEquals(3,interval.getYears());
assertEquals(4,interval.getMonths());
writable.set(new HiveIntervalYearMonthWritable(new HiveIntervalYearMonth(5,6)));
interval=writable.getHiveIntervalYearMonth();
assertEquals(5,interval.getYears());
assertEquals(6,interval.getMonths());
}
Class: org.apache.hadoop.hive.serde2.io.TestHiveVarcharWritable InternalCallVerifier EqualityVerifier
// enforceMaxLength() must truncate only when the max is below the current
// length: 20 and 10 leave a 10-char value alone, 8 truncates it to 8.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testEnforceLength() throws Exception {
HiveVarcharWritable varchar=new HiveVarcharWritable(new HiveVarchar("0123456789",10));
assertEquals(10,varchar.getCharacterLength());
// {max length to enforce, expected resulting character length}
int[][] cases={{20,10},{10,10},{8,8}};
for (int[] c : cases) {
varchar.enforceMaxLength(c[0]);
assertEquals(c[1],varchar.getCharacterLength());
}
}
InternalCallVerifier EqualityVerifier
// getCharacterLength() must track the value through every set(...) overload,
// through enforceMaxLength(), and through a write()/readFields() round trip.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testStringLength() throws Exception {
HiveVarcharWritable varchar=new HiveVarcharWritable(new HiveVarchar("0123456789",10));
assertEquals(10,varchar.getCharacterLength());
varchar.set("012345678901234");
assertEquals(15,varchar.getCharacterLength());
varchar.set(new HiveVarcharWritable(new HiveVarchar("01234",-1)));
assertEquals(5,varchar.getCharacterLength());
varchar.set(new HiveVarchar("012345",-1));
assertEquals(6,varchar.getCharacterLength());
varchar.set("0123456",-1);
assertEquals(7,varchar.getCharacterLength());
varchar.set(new HiveVarcharWritable(new HiveVarchar("01234567",-1)),-1);
assertEquals(8,varchar.getCharacterLength());
varchar.enforceMaxLength(3);
assertEquals(3,varchar.getCharacterLength());
// Deserializing over the truncated value must adopt the serialized length.
ByteArrayOutputStream buffer=new ByteArrayOutputStream();
HiveVarcharWritable serialized=new HiveVarcharWritable(new HiveVarchar("abcdef",-1));
serialized.write(new DataOutputStream(buffer));
varchar.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));
assertEquals(6,varchar.getCharacterLength());
}
InternalCallVerifier EqualityVerifier
// toString() and getHiveVarchar().toString() must both yield the stored value
// without padding, even when the declared max length is larger.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testStringValue() throws Exception {
HiveVarcharWritable varchar=new HiveVarcharWritable(new HiveVarchar("abcde",20));
assertEquals("abcde",varchar.toString());
assertEquals("abcde",varchar.getHiveVarchar().toString());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Checks equals() (both directions) and compareTo() for HiveVarcharWritable:
// identical values compare equal; a longer value, a value with trailing
// whitespace, and a value with leading whitespace all compare unequal.
// NOTE(review): the literals "abcd " and " abcd" may contain more whitespace
// than is visible here — confirm exact padding in the original source.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testComparison() throws Exception {
HiveVarcharWritable hc1=new HiveVarcharWritable(new HiveVarchar("abcd",20));
HiveVarcharWritable hc2=new HiveVarcharWritable(new HiveVarchar("abcd",20));
assertTrue(hc1.equals(hc2));
assertTrue(hc2.equals(hc1));
assertEquals(0,hc1.compareTo(hc2));
assertEquals(0,hc2.compareTo(hc1));
// Longer value: not equal, non-zero comparison.
hc2=new HiveVarcharWritable(new HiveVarchar("abcde",20));
assertFalse(hc1.equals(hc2));
assertFalse(hc2.equals(hc1));
assertFalse(0 == hc1.compareTo(hc2));
assertFalse(0 == hc2.compareTo(hc1));
// Trailing whitespace is significant for varchar (unlike char).
hc2=new HiveVarcharWritable(new HiveVarchar("abcd ",30));
assertFalse(hc1.equals(hc2));
assertFalse(hc2.equals(hc1));
assertFalse(0 == hc1.compareTo(hc2));
assertFalse(0 == hc2.compareTo(hc1));
// Leading whitespace is significant too.
hc2=new HiveVarcharWritable(new HiveVarchar(" abcd",20));
assertFalse(hc1.equals(hc2));
assertFalse(hc2.equals(hc1));
assertFalse(0 == hc1.compareTo(hc2));
assertFalse(0 == hc2.compareTo(hc1));
}
Class: org.apache.hadoop.hive.serde2.io.TestTimestampWritable EqualityVerifier
// reverseNanos() must reverse the decimal digits of a nanosecond value
// (dropping trailing zeros of the reversed form, per the expected outputs).
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testReverseNanos(){
// {expected reversed value, input nanos}
int[][] cases={
{0,0},
{120000000,21},
{32100000,1230},
{5,500000000},
{987654321,123456789},
{12345678,876543210}};
for (int[] c : cases) {
assertEquals(c[0],reverseNanos(c[1]));
}
}
BooleanVerifier EqualityVerifier HybridVerifier
// Documents size assumptions of the TimestampWritable serialization format:
// VInt encoding widths at the boundary values, and that the additional-seconds
// bit budget keeps millisecond arithmetic within a signed long.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testMaxSize(){
// Nanosecond field (up to 999,999,999) needs at most a 5-byte VInt.
assertEquals(5,WritableUtils.getVIntSize(999999999));
assertEquals(5,WritableUtils.getVIntSize(-2 - 999999999));
assertEquals(3,WritableUtils.getVIntSize(Short.MAX_VALUE));
assertEquals(3,WritableUtils.getVIntSize(Short.MIN_VALUE));
// Largest representable seconds value, scaled to millis, must not overflow a
// long — and one more bit would.
assertTrue((((long)MAX_ADDITIONAL_SECONDS_BITS) << 31) * 1000 < Long.MAX_VALUE);
assertTrue((((double)MAX_ADDITIONAL_SECONDS_BITS + 1) * (1L << 31)) * 1000 > Long.MAX_VALUE);
assertEquals(4,WritableUtils.getVIntSize(MAX_ADDITIONAL_SECONDS_BITS));
}
APIUtilityVerifier IterativeVerifier EqualityVerifier
// Property test: for 10,000 seeded-random timestamps (with random nanos),
// decimalToTimestamp(timestampToDecimal(ts)) must be the identity.
@Test @Concurrent(count=4) public void testDecimalToTimestampRandomly(){
Random rng=new Random(294729777L);
for (int iteration=0; iteration < 10000; ++iteration) {
Timestamp timestamp=new Timestamp(randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS,MAX_FOUR_DIGIT_YEAR_MILLIS,rng));
timestamp.setNanos(randomNanos(rng,9));
assertEquals(timestamp,TimestampWritable.decimalToTimestamp(timestampToDecimal(timestamp)));
}
}
APIUtilityVerifier EqualityVerifier
// Pre-epoch timestamp with sub-millisecond nanos: conversion through both
// HiveDecimal and double representations must recover the exact timestamp.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testDecimalToTimestampCornerCases(){
Timestamp timestamp=new Timestamp(parseToMillis("1969-03-04 05:44:33"));
// The base timestamp must sit exactly on a second boundary.
assertEquals(0,timestamp.getTime() % 1000);
for ( int nanos : new int[]{100000,900000,999100000,999900000}) {
timestamp.setNanos(nanos);
HiveDecimal decimal=timestampToDecimal(timestamp);
assertEquals(timestamp,TimestampWritable.decimalToTimestamp(decimal));
assertEquals(timestamp,TimestampWritable.doubleToTimestamp(decimal.bigDecimalValue().doubleValue()));
}
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
// Property test: a TimestampWritable converted to double must recover the
// nanosecond part (at the chosen precision) and convert back to the original
// Timestamp via both doubleToTimestamp and decimalToTimestamp.
@Test @Concurrent(count=4) public void testToFromDouble(){
Random rand=new Random(294729777L);
// Precision is capped at 4 fractional digits — presumably the limit at which
// a double can represent the fraction exactly enough to round-trip.
for (int nanosPrecision=0; nanosPrecision <= 4; ++nanosPrecision) {
for (int i=0; i < 10000; ++i) {
long millis=randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS,MAX_FOUR_DIGIT_YEAR_MILLIS,rand);
Timestamp ts=new Timestamp(millis);
int nanos=randomNanos(rand,nanosPrecision);
ts.setNanos(nanos);
TimestampWritable tsw=new TimestampWritable(ts);
double asDouble=tsw.getDouble();
// Extract the fractional seconds from the double, round at the generated
// precision, then rescale back to whole nanoseconds.
int recoveredNanos=(int)(Math.round((asDouble - Math.floor(asDouble)) * Math.pow(10,nanosPrecision)) * Math.pow(10,9 - nanosPrecision));
assertEquals(String.format("Invalid nanosecond part recovered from %f",asDouble),nanos,recoveredNanos);
assertEquals(ts,TimestampWritable.doubleToTimestamp(asDouble));
assertEquals(ts,TimestampWritable.decimalToTimestamp(HiveDecimal.create(BigDecimal.valueOf(asDouble))));
}
}
}
EqualityVerifier
// Builds TimestampWritable values directly from their on-disk int+VInt
// representation and checks the decoded string form.
// Layout (as exercised here): first int holds seconds; HAS_DECIMAL_MASK flags
// a following reversed-nanos VInt; bit 31 flags additional high seconds bits
// carried in a second VInt.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testSerializationFormatDirectly() throws IOException {
assertEquals("1970-01-01 00:00:00",fromIntAndVInts(0).toString());
assertEquals("1970-01-01 00:00:01",fromIntAndVInts(1).toString());
assertEquals("1970-01-01 00:05:00",fromIntAndVInts(300).toString());
assertEquals("1970-01-01 02:00:00",fromIntAndVInts(7200).toString());
assertEquals("2000-01-02 03:04:05",fromIntAndVInts(946782245).toString());
// Without HAS_DECIMAL_MASK the trailing VInt is ignored.
assertEquals("2000-01-02 03:04:05",fromIntAndVInts(946782245,3210).toString());
// 3210 is the digit-reversed encoding of the .0123 fraction.
assertEquals("2000-01-02 03:04:05.0123",fromIntAndVInts(946782245 | HAS_DECIMAL_MASK,3210).toString());
assertEquals("2038-01-19 03:14:07",fromIntAndVInts(Integer.MAX_VALUE).toString());
assertEquals("2038-01-19 03:14:07.012345678",fromIntAndVInts(Integer.MAX_VALUE | HAS_DECIMAL_MASK,876543210).toString());
// Seconds beyond 31 bits: low bits in the int (with bit 31 set), high bits in
// an extra VInt; a negative first VInt encodes "has additional seconds".
long seconds=253392390415L;
assertEquals("9999-09-08 07:06:55",fromIntAndVInts((int)(seconds & 0x7fffffff) | (1 << 31),-1L,seconds >> 31).toString());
assertEquals("9999-09-08 07:06:55.0123",fromIntAndVInts((int)(seconds & 0x7fffffff) | (1 << 31),-3210 - 1,seconds >> 31).toString());
}
EqualityVerifier
// millisToSeconds() must floor toward negative infinity (so -1..-1000 ms all
// map to second -1), not truncate toward zero.
@Test @Concurrent(count=4) @Repeating(repetition=100) public void testMillisToSeconds(){
// {expected seconds, input millis}
long[][] cases={
{0,0},
{-1,-1},
{-1,-999},
{-1,-1000},
{-2,-1001},
{-2,-1999},
{-2,-2000},
{-3,-2001},
{-99,-99000},
{-100,-99001},
{-100,-100000},
{1,1500},
{19,19999},
{20,20000}};
for (long[] c : cases) {
assertEquals(c[0],TimestampWritable.millisToSeconds(c[1]));
}
}
Class: org.apache.hadoop.hive.serde2.lazy.TestLazySimpleSerDe APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Tests the deprecated usage of SerDeParameters: a row serialized and
 * deserialized through LazySimpleSerDe with legacy initSerdeParams must
 * preserve its field values.
 */
@Test @SuppressWarnings("deprecation") public void testSerDeParameters() throws SerDeException, IOException {
LazySimpleSerDe serde=new LazySimpleSerDe();
Configuration conf=new Configuration();
// Generate a deterministic random row to round-trip.
MyTestClass testRow=new MyTestClass();
ExtraTypeInfo extraTypeInfo=new ExtraTypeInfo();
testRow.randomFill(new Random(1234),extraTypeInfo);
StructObjectInspector rowInspector=(StructObjectInspector)ObjectInspectorFactory.getReflectionObjectInspector(MyTestClass.class,ObjectInspectorOptions.JAVA);
String columnNames=ObjectInspectorUtils.getFieldNames(rowInspector);
String columnTypes=ObjectInspectorUtils.getFieldTypes(rowInspector);
Properties tableProperties=new Properties();
tableProperties.setProperty(serdeConstants.LIST_COLUMNS,columnNames);
tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES,columnTypes);
SerDeUtils.initializeSerDe(serde,conf,tableProperties,null);
// Deprecated entry point under test.
SerDeParameters params=LazySimpleSerDe.initSerdeParams(conf,tableProperties,"testSerdeName");
LazyStruct deserialized=(LazyStruct)serializeAndDeserialize(testRow,rowInspector,serde,params);
assertEquals((boolean)testRow.myBool,((LazyBoolean)deserialized.getField(0)).getWritableObject().get());
assertEquals((int)testRow.myInt,((LazyInteger)deserialized.getField(3)).getWritableObject().get());
}
Class: org.apache.hadoop.hive.serde2.objectinspector.primitive.TestPrimitiveObjectInspectorUtils EqualityVerifier
// Every primitive category must map to its expected grouping; all numeric
// categories share NUMERIC_GROUP and both date-like categories share
// DATE_GROUP, so those are checked in loops.
@Test public void testGetPrimitiveGrouping(){
PrimitiveCategory[] numericCategories={PrimitiveCategory.BYTE,PrimitiveCategory.SHORT,PrimitiveCategory.INT,PrimitiveCategory.LONG,PrimitiveCategory.FLOAT,PrimitiveCategory.DOUBLE,PrimitiveCategory.DECIMAL};
for (PrimitiveCategory category : numericCategories) {
assertEquals(PrimitiveGrouping.NUMERIC_GROUP,PrimitiveObjectInspectorUtils.getPrimitiveGrouping(category));
}
assertEquals(PrimitiveGrouping.STRING_GROUP,PrimitiveObjectInspectorUtils.getPrimitiveGrouping(PrimitiveCategory.STRING));
PrimitiveCategory[] dateCategories={PrimitiveCategory.DATE,PrimitiveCategory.TIMESTAMP};
for (PrimitiveCategory category : dateCategories) {
assertEquals(PrimitiveGrouping.DATE_GROUP,PrimitiveObjectInspectorUtils.getPrimitiveGrouping(category));
}
assertEquals(PrimitiveGrouping.BOOLEAN_GROUP,PrimitiveObjectInspectorUtils.getPrimitiveGrouping(PrimitiveCategory.BOOLEAN));
assertEquals(PrimitiveGrouping.BINARY_GROUP,PrimitiveObjectInspectorUtils.getPrimitiveGrouping(PrimitiveCategory.BINARY));
assertEquals(PrimitiveGrouping.UNKNOWN_GROUP,PrimitiveObjectInspectorUtils.getPrimitiveGrouping(PrimitiveCategory.UNKNOWN));
assertEquals(PrimitiveGrouping.VOID_GROUP,PrimitiveObjectInspectorUtils.getPrimitiveGrouping(PrimitiveCategory.VOID));
}
EqualityVerifier
// Verifies PrimitiveObjectInspectorUtils.getTimestamp(value, oi) for every
// primitive category, with whole-number inputs interpreted as MILLISECONDS
// (the default, no intAsSeconds flag). GMT formatting is used for numeric
// inputs; local formatting for string/date inputs, which carry no zone shift.
@Test public void testgetTimestampWithMillisecondsInt(){
DateFormat localDateFormat=new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
DateFormat gmtDateFormat=new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
gmtDateFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
// VOID: any value yields null.
PrimitiveObjectInspector voidOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.VOID);
assertEquals(null,PrimitiveObjectInspectorUtils.getTimestamp(new Object(),voidOI));
// BOOLEAN: true -> 1 ms, false -> 0 ms past the epoch.
PrimitiveObjectInspector booleanOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.BOOLEAN);
assertEquals("1970-01-01 00:00:00.001",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(true,booleanOI)));
assertEquals("1970-01-01 00:00:00.000",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(false,booleanOI)));
// Integral types: value is a signed millisecond offset from the epoch.
PrimitiveObjectInspector byteOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.BYTE);
assertEquals("1970-01-01 00:00:00.001",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((byte)1,byteOI)));
assertEquals("1969-12-31 23:59:59.999",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((byte)-1,byteOI)));
PrimitiveObjectInspector shortOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.SHORT);
assertEquals("1970-01-01 00:00:00.001",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((short)1,shortOI)));
assertEquals("1969-12-31 23:59:59.999",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((short)-1,shortOI)));
PrimitiveObjectInspector intOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.INT);
assertEquals("1970-01-17 11:22:01.282",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((int)1423321282,intOI)));
assertEquals("1969-12-31 23:59:59.999",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((int)-1,intOI)));
PrimitiveObjectInspector longOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.LONG);
assertEquals("1970-01-17 11:22:01.282",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(1423321282L,longOI)));
assertEquals("1969-12-31 23:59:59.999",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(-1L,longOI)));
// Floating-point and decimal: value is SECONDS with a fractional part (note
// the float case loses precision, landing on 15:02:24 instead of 15:01:22).
PrimitiveObjectInspector floatOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.FLOAT);
assertEquals("2015-02-07 15:02:24.000",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(1423321282.123f,floatOI)));
assertEquals("1969-12-31 23:59:58.876",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(-1.123f,floatOI)));
PrimitiveObjectInspector doubleOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.DOUBLE);
assertEquals("2015-02-07 15:01:22.123",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((double)1423321282.123,doubleOI)));
assertEquals("1969-12-31 23:59:58.877",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((double)-1.123,doubleOI)));
PrimitiveObjectInspector decimalOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.DECIMAL);
assertEquals("2015-02-07 15:01:22.000",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(HiveDecimal.create(1423321282L),decimalOI)));
assertEquals("1969-12-31 23:59:59.000",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(HiveDecimal.create(-1),decimalOI)));
// String-like and date/timestamp inputs are parsed as local wall-clock time.
PrimitiveObjectInspector stringOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.STRING);
assertEquals("2015-02-07 15:01:22.123",localDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp("2015-02-07 15:01:22.123",stringOI)));
PrimitiveObjectInspector charOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.CHAR);
assertEquals("2015-02-07 15:01:22.123",localDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(new HiveChar("2015-02-07 15:01:22.123",30),charOI)));
PrimitiveObjectInspector varcharOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.VARCHAR);
assertEquals("2015-02-07 15:01:22.123",localDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(new HiveVarchar("2015-02-07 15:01:22.123",30),varcharOI)));
PrimitiveObjectInspector dateOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.DATE);
assertEquals("2015-02-07 00:00:00.000",localDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(new Date(1423321282123L),dateOI)));
PrimitiveObjectInspector timestampOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.TIMESTAMP);
assertEquals("2015-02-07 15:01:22.123",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(new Timestamp(1423321282123L),timestampOI)));
}
EqualityVerifier
// Same coverage as testgetTimestampWithMillisecondsInt, but with the extra
// boolean flag set so whole-number inputs are interpreted as SECONDS:
// e.g. int/long 1423321282 now decodes to 2015-02-07 rather than 1970-01-17.
@Test public void testgetTimestampWithSecondsInt(){
DateFormat localDateFormat=new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
DateFormat gmtDateFormat=new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
gmtDateFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
// VOID: any value yields null.
PrimitiveObjectInspector voidOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.VOID);
assertEquals(null,PrimitiveObjectInspectorUtils.getTimestamp(new Object(),voidOI));
// BOOLEAN: true -> 1 second, false -> 0 seconds past the epoch.
PrimitiveObjectInspector booleanOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.BOOLEAN);
assertEquals("1970-01-01 00:00:01.000",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(true,booleanOI,true)));
assertEquals("1970-01-01 00:00:00.000",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(false,booleanOI,true)));
// Integral types: value is a signed SECOND offset from the epoch.
PrimitiveObjectInspector byteOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.BYTE);
assertEquals("1970-01-01 00:00:01.000",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((byte)1,byteOI,true)));
assertEquals("1969-12-31 23:59:59.000",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((byte)-1,byteOI,true)));
PrimitiveObjectInspector shortOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.SHORT);
assertEquals("1970-01-01 00:00:01.000",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((short)1,shortOI,true)));
assertEquals("1969-12-31 23:59:59.000",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((short)-1,shortOI,true)));
PrimitiveObjectInspector intOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.INT);
assertEquals("2015-02-07 15:01:22.000",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((int)1423321282,intOI,true)));
assertEquals("1969-12-31 23:59:59.000",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((int)-1,intOI,true)));
PrimitiveObjectInspector longOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.LONG);
assertEquals("2015-02-07 15:01:22.000",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(1423321282L,longOI,true)));
assertEquals("1969-12-31 23:59:59.000",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(-1L,longOI,true)));
// Floating-point/decimal behave as in the milliseconds variant (the flag only
// affects whole-number integral interpretation).
PrimitiveObjectInspector floatOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.FLOAT);
assertEquals("2015-02-07 15:02:24.000",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(1423321282.123f,floatOI,true)));
assertEquals("1969-12-31 23:59:58.876",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(-1.123f,floatOI,true)));
PrimitiveObjectInspector doubleOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.DOUBLE);
assertEquals("2015-02-07 15:01:22.123",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((double)1423321282.123,doubleOI,true)));
assertEquals("1969-12-31 23:59:58.877",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((double)-1.123,doubleOI,true)));
PrimitiveObjectInspector decimalOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.DECIMAL);
assertEquals("2015-02-07 15:01:22.000",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(HiveDecimal.create(1423321282L),decimalOI,true)));
assertEquals("1969-12-31 23:59:59.000",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(HiveDecimal.create(-1),decimalOI,true)));
// String-like and date/timestamp inputs are parsed as local wall-clock time.
PrimitiveObjectInspector stringOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.STRING);
assertEquals("2015-02-07 15:01:22.123",localDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp("2015-02-07 15:01:22.123",stringOI,true)));
PrimitiveObjectInspector charOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.CHAR);
assertEquals("2015-02-07 15:01:22.123",localDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(new HiveChar("2015-02-07 15:01:22.123",30),charOI,true)));
PrimitiveObjectInspector varcharOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.VARCHAR);
assertEquals("2015-02-07 15:01:22.123",localDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(new HiveVarchar("2015-02-07 15:01:22.123",30),varcharOI,true)));
PrimitiveObjectInspector dateOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.DATE);
assertEquals("2015-02-07 00:00:00.000",localDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(new Date(1423321282123L),dateOI,true)));
PrimitiveObjectInspector timestampOI=PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.TIMESTAMP);
assertEquals("2015-02-07 15:01:22.123",gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(new Timestamp(1423321282123L),timestampOI,true)));
}
Class: org.apache.hive.beeline.TestBeeLineHistory APIUtilityVerifier EqualityVerifier
/**
 * The "!history" command must print exactly 10 history entries (one per
 * output line) from the prepared history file.
 */
@Test public void testNumHistories() throws Exception {
ByteArrayOutputStream os=new ByteArrayOutputStream();
PrintStream ops=new PrintStream(os);
BeeLine beeline=new BeeLine();
beeline.getOpts().setHistoryFile(fileName);
beeline.setOutputStream(ops);
beeline.getConsoleReader(null);
beeline.dispatch("!history");
String output=os.toString("UTF-8");
int numHistories=output.split("\n").length;
// fixed: JUnit's assertEquals takes (expected, actual) — the arguments were
// reversed, which produces a misleading failure message.
Assert.assertEquals(10,numHistories);
beeline.close();
}
Class: org.apache.hive.beeline.TestBeelineArgParsing BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Test setting hive conf and hive vars with --hiveconf and --hivevar.
 */
@Test public void testHiveConfAndVars() throws Exception {
TestBeeline bl=new TestBeeline();
String args[]=new String[]{"-u","url","-n","name","-p","password","-d","driver","--hiveconf","a=avalue","--hiveconf","b=bvalue","--hivevar","c=cvalue","--hivevar","d=dvalue"};
Assert.assertEquals(0,bl.initArgs(args));
// improved: assertEquals instead of assertTrue(x.equals(y)) so a failure
// reports the expected and actual values instead of just "false".
Assert.assertEquals("url name password driver",bl.connectArgs);
Assert.assertEquals("avalue",bl.getOpts().getHiveConfVariables().get("a"));
Assert.assertEquals("bvalue",bl.getOpts().getHiveConfVariables().get("b"));
Assert.assertEquals("cvalue",bl.getOpts().getHiveVariables().get("c"));
Assert.assertEquals("dvalue",bl.getOpts().getHiveVariables().get("d"));
}
InternalCallVerifier EqualityVerifier
/**
 * Displays the usage: --help must parse successfully and set the help flag.
 */
@Test public void testHelp() throws Exception {
TestBeeline bl=new TestBeeline();
String args[]=new String[]{"--help"};
Assert.assertEquals(0,bl.initArgs(args));
// improved: assertTrue is the idiomatic form of assertEquals(true, ...).
Assert.assertTrue(bl.getOpts().isHelpAsked());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Multiple -e options must all be collected as queries while the connection
// arguments are assembled in order.
@Test public void testQueryScripts() throws Exception {
TestBeeline beeLine=new TestBeeline();
String[] argv={"-u","url","-n","name","-p","password","-d","driver","-e","select1","-e","select2"};
Assert.assertEquals(0,beeLine.initArgs(argv));
Assert.assertTrue(beeLine.connectArgs.equals("url name password driver"));
Assert.assertTrue(beeLine.queries.contains("select1"));
Assert.assertTrue(beeLine.queries.contains("select2"));
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Long-form boolean options (--autoCommit=true, --verbose, --truncateTable)
// must be reflected in the parsed BeeLineOpts.
@Test public void testBeelineOpts() throws Exception {
TestBeeline beeLine=new TestBeeline();
String[] argv={"-u","url","-n","name","-p","password","-d","driver","--autoCommit=true","--verbose","--truncateTable"};
Assert.assertEquals(0,beeLine.initArgs(argv));
Assert.assertTrue(beeLine.connectArgs.equals("url name password driver"));
Assert.assertTrue(beeLine.getOpts().getAutoCommit());
Assert.assertTrue(beeLine.getOpts().getVerbose());
Assert.assertTrue(beeLine.getOpts().getTruncateTable());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * The first flag is taken by the parser: a repeated -u must not override the
 * URL from the first occurrence.
 */
@Test public void testDuplicateArgs() throws Exception {
TestBeeline beeLine=new TestBeeline();
String[] argv={"-u","url","-u","url2","-n","name","-p","password","-d","driver"};
Assert.assertEquals(0,beeLine.initArgs(argv));
Assert.assertTrue(beeLine.connectArgs.equals("url name password driver"));
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Basic connection flags plus -a must populate connect args and auth type.
 */
@Test public void testSimpleArgs() throws Exception {
TestBeeline bl=new TestBeeline();
String args[]=new String[]{"-u","url","-n","name","-p","password","-d","driver","-a","authType"};
// fixed: use the imported Assert consistently instead of the one
// fully-qualified org.junit.Assert call, matching the sibling tests.
Assert.assertEquals(0,bl.initArgs(args));
// improved: assertEquals reports both values on failure, unlike
// assertTrue(x.equals(y)).
Assert.assertEquals("url name password driver",bl.connectArgs);
Assert.assertEquals("authType",bl.getOpts().getAuthType());
}
EqualityVerifier
/**
 * Displays the usage: an option missing its value (-n with no name) must make
 * argument parsing fail with -1.
 */
@Test public void testUnmatchedArgs() throws Exception {
TestBeeline beeLine=new TestBeeline();
String[] argv={"-u","url","-n"};
Assert.assertEquals(-1,beeLine.initArgs(argv));
}
BranchVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Adding a driver jar without registering the driver class: the driver is
 * found only when the JVM supports service-loader driver discovery.
 */
@Test public void testAddLocalJarWithoutAddDriverClazz() throws Exception {
TestBeeline bl=new TestBeeline();
LOG.info("Add " + driverJarFileName + " for the driver class "+ driverClazzName);
bl.addLocalJar(driverJarFileName);
if (!defaultSupported) {
Assert.assertNull(bl.findLocalDriver(connectionString));
}
else {
// fixed: assertEquals takes (expected, actual) — the expected class name
// goes first so failures report correctly.
Assert.assertEquals(driverClazzName,bl.findLocalDriver(connectionString).getClass().getName());
}
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Test setting script file with -f option.
 */
@Test public void testScriptFile() throws Exception {
TestBeeline beeLine=new TestBeeline();
String[] argv={"-u","url","-n","name","-p","password","-d","driver","-f","myscript"};
Assert.assertEquals(0,beeLine.initArgs(argv));
Assert.assertTrue(beeLine.connectArgs.equals("url name password driver"));
Assert.assertTrue(beeLine.getOpts().getScriptFile().equals("myscript"));
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Adds a driver jar and registers its driver class name; the driver must
 * then be resolvable for the connection string.
 */
@Test public void testAddLocalJar() throws Exception {
  TestBeeline bl = new TestBeeline();
  // Before the jar is added, no local driver should match.
  Assert.assertNull(bl.findLocalDriver(connectionString));
  LOG.info("Add " + driverJarFileName + " for the driver class " + driverClazzName);
  bl.addLocalJar(driverJarFileName);
  bl.addlocaldrivername(driverClazzName);
  // Fixed argument order: JUnit expects (expected, actual); the original
  // had them swapped, which produces a misleading failure message.
  Assert.assertEquals(driverClazzName, bl.findLocalDriver(connectionString).getClass().getName());
}
Class: org.apache.hive.common.util.TestBloomFilter APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * BloomFilter membership for doubles: after each insertion, exactly the
 * values inserted so far must test positive and the rest negative.
 * The original 20-line copy-paste assertion ladder is consolidated into a
 * data-driven loop with identical add/test ordering.
 */
@Test public void testBloomFilterDouble(){
  BloomFilter bf = new BloomFilter(10000);
  double[] vals = {Double.MIN_VALUE, 1.1d, 2.2d, Double.MAX_VALUE};
  // Nothing added yet: every probe must be negative.
  for (double v : vals) {
    assertEquals(false, bf.testDouble(v));
  }
  // Insert one value at a time; check the full membership matrix each step.
  for (int added = 0; added < vals.length; added++) {
    bf.addDouble(vals[added]);
    for (int j = 0; j < vals.length; j++) {
      assertEquals(j <= added, bf.testDouble(vals[j]));
    }
  }
  // Bulk-load random values; the most recently added one must be found.
  double randVal = 0;
  for (int i = 0; i < COUNT; i++) {
    randVal = rand.nextDouble();
    bf.addDouble(randVal);
  }
  assertEquals(true, bf.testDouble(randVal));
  // A value never inserted should not be reported at this sizing.
  assertEquals(false, bf.testDouble(-120.2d));
  // Serialized bit-set size for a 10,000-entry filter (implementation-defined).
  assertEquals(7800, bf.sizeInBytes());
}
InternalCallVerifier EqualityVerifier
/**
 * Merging two BloomFilters yields the union of their contents: before the
 * merge bf only reports its own entries; afterwards it reports both sets.
 */
@Test public void testMerge(){
  BloomFilter bf = new BloomFilter(10000);
  String[] first = {"bloo", "bloom fil", "bloom filter", "cuckoo filter"};
  for (String s : first) {
    bf.addString(s);
  }
  BloomFilter bf2 = new BloomFilter(10000);
  String[] second = {"2_bloo", "2_bloom fil", "2_bloom filter", "2_cuckoo filter"};
  for (String s : second) {
    bf2.addString(s);
  }
  // Before the merge, bf contains only its own entries.
  for (String s : first) {
    assertEquals(true, bf.testString(s));
  }
  for (String s : second) {
    assertEquals(false, bf.testString(s));
  }
  bf.merge(bf2);
  // After the merge, bf contains the union of both filters.
  for (String s : first) {
    assertEquals(true, bf.testString(s));
  }
  for (String s : second) {
    assertEquals(true, bf.testString(s));
  }
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * BloomFilter membership for strings: after each insertion, exactly the
 * values inserted so far must test positive and the rest negative.
 * Copy-paste assertion ladder consolidated into a data-driven loop with
 * identical add/test ordering.
 */
@Test public void testBloomFilterString(){
  BloomFilter bf = new BloomFilter(100000);
  String[] vals = {"bloo", "bloom fil", "bloom filter", "cuckoo filter"};
  // Nothing added yet: every probe must be negative.
  for (String v : vals) {
    assertEquals(false, bf.testString(v));
  }
  // Insert one value at a time; check the full membership matrix each step.
  for (int added = 0; added < vals.length; added++) {
    bf.addString(vals[added]);
    for (int j = 0; j < vals.length; j++) {
      assertEquals(j <= added, bf.testString(vals[j]));
    }
  }
  // Bulk-load random values; the most recently added one must be found.
  long randVal = 0;
  for (int i = 0; i < COUNT; i++) {
    randVal = rand.nextLong();
    bf.addString(Long.toString(randVal));
  }
  assertEquals(true, bf.testString(Long.toString(randVal)));
  // A value never inserted should not be reported at this sizing.
  assertEquals(false, bf.testString(Long.toString(-120)));
  // Serialized bit-set size for a 100,000-entry filter (implementation-defined).
  assertEquals(77944, bf.sizeInBytes());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * BloomFilter membership for floats (routed through the double API, as in
 * the original): after each insertion, exactly the values inserted so far
 * must test positive and the rest negative.
 */
@Test public void testBloomFilterFloat(){
  BloomFilter bf = new BloomFilter(10000);
  float[] vals = {Float.MIN_VALUE, 1.1f, 2.2f, Float.MAX_VALUE};
  // Nothing added yet: every probe must be negative.
  for (float v : vals) {
    assertEquals(false, bf.testDouble(v));
  }
  // Insert one value at a time; check the full membership matrix each step.
  for (int added = 0; added < vals.length; added++) {
    bf.addDouble(vals[added]);
    for (int j = 0; j < vals.length; j++) {
      assertEquals(j <= added, bf.testDouble(vals[j]));
    }
  }
  // Bulk-load random values; the most recently added one must be found.
  float randVal = 0;
  for (int i = 0; i < COUNT; i++) {
    randVal = rand.nextFloat();
    bf.addDouble(randVal);
  }
  assertEquals(true, bf.testDouble(randVal));
  // A value never inserted should not be reported at this sizing.
  assertEquals(false, bf.testDouble(-120.2f));
  // Serialized bit-set size for a 10,000-entry filter (implementation-defined).
  assertEquals(7800, bf.sizeInBytes());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * BloomFilter membership for longs: after each insertion, exactly the
 * values inserted so far must test positive and the rest negative.
 */
@Test public void testBloomFilterLong(){
  BloomFilter bf = new BloomFilter(10000);
  long[] vals = {Long.MIN_VALUE, 1, 2, Long.MAX_VALUE};
  // Nothing added yet: every probe must be negative.
  for (long v : vals) {
    assertEquals(false, bf.testLong(v));
  }
  // Insert one value at a time; check the full membership matrix each step.
  for (int added = 0; added < vals.length; added++) {
    bf.addLong(vals[added]);
    for (int j = 0; j < vals.length; j++) {
      assertEquals(j <= added, bf.testLong(vals[j]));
    }
  }
  // Bulk-load random values; the most recently added one must be found.
  long randVal = 0;
  for (int i = 0; i < COUNT; i++) {
    randVal = rand.nextLong();
    bf.addLong(randVal);
  }
  assertEquals(true, bf.testLong(randVal));
  // A value never inserted should not be reported at this sizing.
  assertEquals(false, bf.testLong(-120));
  // Serialized bit-set size for a 10,000-entry filter (implementation-defined).
  assertEquals(7800, bf.sizeInBytes());
}
InternalCallVerifier EqualityVerifier
/**
 * BloomFilter membership for raw byte[] keys: after each insertion,
 * exactly the keys inserted so far must test positive; corrupting a key's
 * prefix must make it test negative again.
 */
@Test public void testBloomFilterBytes(){
  BloomFilter bf = new BloomFilter(10000);
  // Four distinct keys of increasing length.
  byte[][] keys = {
    {1, 2, 3},
    {1, 2, 3, 4},
    {1, 2, 3, 4, 5},
    {1, 2, 3, 4, 5, 6}
  };
  // Nothing added yet: every probe must be negative.
  for (byte[] k : keys) {
    assertEquals(false, bf.test(k));
  }
  // Insert one key at a time; check the full membership matrix each step.
  for (int added = 0; added < keys.length; added++) {
    bf.add(keys[added]);
    for (int j = 0; j < keys.length; j++) {
      assertEquals(j <= added, bf.test(keys[j]));
    }
  }
  // Bulk-load random keys (reusing one buffer, like the original);
  // the most recently added contents must be found.
  byte[] randVal = new byte[COUNT];
  for (int i = 0; i < COUNT; i++) {
    rand.nextBytes(randVal);
    bf.add(randVal);
  }
  assertEquals(true, bf.test(randVal));
  // Zeroing the first five bytes yields a key that was never inserted.
  for (int i = 0; i < 5; i++) {
    randVal[i] = 0;
  }
  assertEquals(false, bf.test(randVal));
  // Serialized bit-set size for a 10,000-entry filter (implementation-defined).
  assertEquals(7800, bf.sizeInBytes());
}
EqualityVerifier
// Verifies BloomFilter.optimalNumOfHashFunctions(expectedEntries, numBits).
// Degenerate inputs (non-positive, or bits <= entries) clamp to 1; only a
// bits/entries ratio of 10 here (10 entries, 100 bits) yields k = 7.
@Test public void testBloomNumHashFunctions(){
assertEquals(1,BloomFilter.optimalNumOfHashFunctions(-1,-1));
assertEquals(1,BloomFilter.optimalNumOfHashFunctions(0,0));
assertEquals(1,BloomFilter.optimalNumOfHashFunctions(10,0));
assertEquals(1,BloomFilter.optimalNumOfHashFunctions(10,10));
assertEquals(7,BloomFilter.optimalNumOfHashFunctions(10,100));
// As expected entries grow for a fixed bit count, k falls back to 1.
assertEquals(1,BloomFilter.optimalNumOfHashFunctions(100,100));
assertEquals(1,BloomFilter.optimalNumOfHashFunctions(1000,100));
assertEquals(1,BloomFilter.optimalNumOfHashFunctions(10000,100));
assertEquals(1,BloomFilter.optimalNumOfHashFunctions(100000,100));
assertEquals(1,BloomFilter.optimalNumOfHashFunctions(1000000,100));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * BloomFilter membership for bytes (routed through the long API, as in
 * the original): after each insertion, exactly the values inserted so far
 * must test positive and the rest negative.
 */
@Test public void testBloomFilterByte(){
  BloomFilter bf = new BloomFilter(10000);
  byte[] vals = {Byte.MIN_VALUE, 1, 2, Byte.MAX_VALUE};
  // Nothing added yet: every probe must be negative.
  for (byte v : vals) {
    assertEquals(false, bf.testLong(v));
  }
  // Insert one value at a time; check the full membership matrix each step.
  for (int added = 0; added < vals.length; added++) {
    bf.addLong(vals[added]);
    for (int j = 0; j < vals.length; j++) {
      assertEquals(j <= added, bf.testLong(vals[j]));
    }
  }
  // Bulk-load random non-negative bytes; the last one added must be found.
  byte randVal = 0;
  for (int i = 0; i < COUNT; i++) {
    randVal = (byte) rand.nextInt(Byte.MAX_VALUE);
    bf.addLong(randVal);
  }
  assertEquals(true, bf.testLong(randVal));
  // -120 is never generated above, so it should not be reported.
  assertEquals(false, bf.testLong((byte) -120));
  // Serialized bit-set size for a 10,000-entry filter (implementation-defined).
  assertEquals(7800, bf.sizeInBytes());
}
EqualityVerifier
// Verifies BloomFilter.optimalNumOfBits(expectedEntries, fpp): bits scale
// roughly linearly with the entry count for a fixed false-positive rate,
// and a looser fpp (0.05 vs 0.03) needs fewer bits. Degenerate inputs
// (zero entries, or fpp >= 1) yield 0.
@Test public void testBloomNumBits(){
assertEquals(0,BloomFilter.optimalNumOfBits(0,0));
assertEquals(0,BloomFilter.optimalNumOfBits(0,1));
assertEquals(0,BloomFilter.optimalNumOfBits(1,1));
assertEquals(7,BloomFilter.optimalNumOfBits(1,0.03));
assertEquals(72,BloomFilter.optimalNumOfBits(10,0.03));
assertEquals(729,BloomFilter.optimalNumOfBits(100,0.03));
assertEquals(7298,BloomFilter.optimalNumOfBits(1000,0.03));
assertEquals(72984,BloomFilter.optimalNumOfBits(10000,0.03));
assertEquals(729844,BloomFilter.optimalNumOfBits(100000,0.03));
assertEquals(7298440,BloomFilter.optimalNumOfBits(1000000,0.03));
assertEquals(6235224,BloomFilter.optimalNumOfBits(1000000,0.05));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * BloomFilter membership for ints (routed through the long API, as in
 * the original): after each insertion, exactly the values inserted so far
 * must test positive and the rest negative.
 */
@Test public void testBloomFilterInt(){
  BloomFilter bf = new BloomFilter(10000);
  int[] vals = {Integer.MIN_VALUE, 1, 2, Integer.MAX_VALUE};
  // Nothing added yet: every probe must be negative.
  for (int v : vals) {
    assertEquals(false, bf.testLong(v));
  }
  // Insert one value at a time; check the full membership matrix each step.
  for (int added = 0; added < vals.length; added++) {
    bf.addLong(vals[added]);
    for (int j = 0; j < vals.length; j++) {
      assertEquals(j <= added, bf.testLong(vals[j]));
    }
  }
  // Bulk-load random values; the most recently added one must be found.
  int randVal = 0;
  for (int i = 0; i < COUNT; i++) {
    randVal = rand.nextInt();
    bf.addLong(randVal);
  }
  assertEquals(true, bf.testLong(randVal));
  // A value never inserted should not be reported at this sizing.
  assertEquals(false, bf.testLong(-120));
  // Serialized bit-set size for a 10,000-entry filter (implementation-defined).
  assertEquals(7800, bf.sizeInBytes());
}
Class: org.apache.hive.common.util.TestMurmur3 APIUtilityVerifier IterativeVerifier EqualityVerifier PublicFieldVerifier
/**
 * Cross-checks Murmur3.hash128 against Guava's murmur3_128 for random
 * 8-byte (long) inputs: both 64-bit halves of the 128-bit hash must match.
 */
@Test public void testHashCodesM3_128_longs(){
  final int seed = 123;
  Random rng = new Random(seed);
  HashFunction reference = Hashing.murmur3_128(seed);
  for (int iter = 0; iter < 1000; iter++) {
    byte[] input = ByteBuffer.allocate(8).putLong(rng.nextLong()).array();
    // Guava returns the 128-bit hash as 16 little-endian bytes.
    ByteBuffer expected = ByteBuffer.allocate(16).order(ByteOrder.LITTLE_ENDIAN);
    expected.put(reference.hashBytes(input).asBytes());
    expected.flip();
    long expectedLo = expected.getLong();
    long expectedHi = expected.getLong(8);
    long[] actual = Murmur3.hash128(input, 0, input.length, seed);
    assertEquals(expectedLo, actual[0]);
    assertEquals(expectedHi, actual[1]);
  }
}
APIUtilityVerifier IterativeVerifier EqualityVerifier PublicFieldVerifier
/**
 * Cross-checks Murmur3.hash128 against Guava's murmur3_128 for random
 * 4-byte (int) inputs, and verifies the hash is unchanged when the same
 * bytes sit at a non-zero offset within a larger array.
 */
@Test public void testHashCodesM3_128_ints(){
  final int seed = 123;
  Random rng = new Random(seed);
  HashFunction reference = Hashing.murmur3_128(seed);
  for (int iter = 0; iter < 1000; iter++) {
    byte[] input = ByteBuffer.allocate(4).putInt(rng.nextInt()).array();
    // Guava returns the 128-bit hash as 16 little-endian bytes.
    ByteBuffer expected = ByteBuffer.allocate(16).order(ByteOrder.LITTLE_ENDIAN);
    expected.put(reference.hashBytes(input).asBytes());
    expected.flip();
    long expectedLo = expected.getLong();
    long expectedHi = expected.getLong(8);
    long[] actual = Murmur3.hash128(input, 0, input.length, seed);
    assertEquals(expectedLo, actual[0]);
    assertEquals(expectedHi, actual[1]);
    // The hash must be independent of where the data sits in the array.
    byte[] shifted = new byte[input.length + 50];
    System.arraycopy(input, 0, shifted, 50, input.length);
    long[] actualShifted = Murmur3.hash128(shifted, 50, input.length, seed);
    assertEquals(expectedLo, actualShifted[0]);
    assertEquals(expectedHi, actualShifted[1]);
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier
/**
 * Cross-checks Murmur3.hash32 against Guava's murmur3_32 for random
 * 4-byte (int) inputs.
 */
@Test public void testHashCodesM3_32_ints(){
  final int seed = 123;
  Random rng = new Random(seed);
  HashFunction reference = Hashing.murmur3_32(seed);
  for (int iter = 0; iter < 1000; iter++) {
    byte[] input = ByteBuffer.allocate(4).putInt(rng.nextInt()).array();
    int expected = reference.hashBytes(input).asInt();
    assertEquals(expected, Murmur3.hash32(input, input.length, seed));
  }
}
APIUtilityVerifier IterativeVerifier EqualityVerifier PublicFieldVerifier
/**
 * Cross-checks Murmur3.hash128 against Guava's murmur3_128 for random
 * 8-byte (double) inputs: both 64-bit halves must match.
 */
@Test public void testHashCodesM3_128_double(){
  final int seed = 123;
  Random rng = new Random(seed);
  HashFunction reference = Hashing.murmur3_128(seed);
  for (int iter = 0; iter < 1000; iter++) {
    byte[] input = ByteBuffer.allocate(8).putDouble(rng.nextDouble()).array();
    // Guava returns the 128-bit hash as 16 little-endian bytes.
    ByteBuffer expected = ByteBuffer.allocate(16).order(ByteOrder.LITTLE_ENDIAN);
    expected.put(reference.hashBytes(input).asBytes());
    expected.flip();
    long expectedLo = expected.getLong();
    long expectedHi = expected.getLong(8);
    long[] actual = Murmur3.hash128(input, 0, input.length, seed);
    assertEquals(expectedLo, actual[0]);
    assertEquals(expectedHi, actual[1]);
  }
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier
/**
 * Cross-checks Murmur3.hash32 against Guava's murmur3_32 for random
 * 8-byte (long) inputs.
 */
@Test public void testHashCodesM3_32_longs(){
  final int seed = 123;
  Random rng = new Random(seed);
  HashFunction reference = Hashing.murmur3_32(seed);
  for (int iter = 0; iter < 1000; iter++) {
    byte[] input = ByteBuffer.allocate(8).putLong(rng.nextLong()).array();
    int expected = reference.hashBytes(input).asInt();
    assertEquals(expected, Murmur3.hash32(input, input.length, seed));
  }
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Cross-checks Murmur3.hash32 against Guava's murmur3_32 for two short
 * ASCII string keys ("test" and "testkey").
 */
@Test public void testHashCodesM3_32_string(){
  final int seed = 123;
  HashFunction reference = Hashing.murmur3_32(seed);
  for (String key : new String[]{"test", "testkey"}) {
    // Hoist the byte conversion so both sides hash the same array.
    byte[] bytes = key.getBytes();
    int expected = reference.hashBytes(bytes).asInt();
    assertEquals(expected, Murmur3.hash32(bytes, bytes.length, seed));
  }
}
APIUtilityVerifier EqualityVerifier PublicFieldVerifier
// Cross-checks Murmur3.hash128 against Guava's murmur3_128 for string keys,
// for both a short key ("test") and one spanning multiple 16-byte blocks,
// then verifies offset-independence by hashing the same bytes at offset 35
// inside a larger buffer pre-filled with a sentinel.
// NOTE(review): key.getBytes() uses the platform default charset; keys here
// are ASCII so the result is stable — confirm if non-ASCII keys are added.
@Test public void testHashCodesM3_128_string(){
String key="test";
int seed=123;
HashFunction hf=Hashing.murmur3_128(seed);
// Guava returns the 128-bit hash as 16 little-endian bytes; read both halves.
ByteBuffer buf=ByteBuffer.allocate(16).order(ByteOrder.LITTLE_ENDIAN);
buf.put(hf.hashBytes(key.getBytes()).asBytes());
buf.flip();
long gl1=buf.getLong();
long gl2=buf.getLong(8);
long[] hc=Murmur3.hash128(key.getBytes(),0,key.getBytes().length,seed);
long m1=hc[0];
long m2=hc[1];
assertEquals(gl1,m1);
assertEquals(gl2,m2);
// Longer key exercises the multi-block path of the 128-bit hash.
key="testkey128_testkey128";
buf=ByteBuffer.allocate(16).order(ByteOrder.LITTLE_ENDIAN);
buf.put(hf.hashBytes(key.getBytes()).asBytes());
buf.flip();
gl1=buf.getLong();
gl2=buf.getLong(8);
byte[] keyBytes=key.getBytes();
hc=Murmur3.hash128(keyBytes,0,keyBytes.length,seed);
m1=hc[0];
m2=hc[1];
assertEquals(gl1,m1);
assertEquals(gl2,m2);
// Same bytes at offset 35 must hash identically; the sentinel fill (-1)
// would change the result if the offset/length were mishandled.
byte[] offsetKeyBytes=new byte[keyBytes.length + 35];
Arrays.fill(offsetKeyBytes,(byte)-1);
System.arraycopy(keyBytes,0,offsetKeyBytes,35,keyBytes.length);
hc=Murmur3.hash128(offsetKeyBytes,35,keyBytes.length,seed);
assertEquals(gl1,hc[0]);
assertEquals(gl2,hc[1]);
}
APIUtilityVerifier EqualityVerifier PublicFieldVerifier
// Pins Murmur3.hash64 to a known golden value for a long ASCII input, and
// verifies offset-independence: the same bytes placed at offset 150 inside
// a buffer pre-filled with a sentinel (123) must produce the same hash.
@Test public void testHashCodeM3_64(){
byte[] origin=("It was the best of times, it was the worst of times," + " it was the age of wisdom, it was the age of foolishness," + " it was the epoch of belief, it was the epoch of incredulity,"+ " it was the season of Light, it was the season of Darkness,"+ " it was the spring of hope, it was the winter of despair,"+ " we had everything before us, we had nothing before us,"+ " we were all going direct to Heaven,"+ " we were all going direct the other way.").getBytes();
long hash=Murmur3.hash64(origin,0,origin.length);
// Golden value: any change to the hash implementation breaks this.
assertEquals(305830725663368540L,hash);
byte[] originOffset=new byte[origin.length + 150];
Arrays.fill(originOffset,(byte)123);
System.arraycopy(origin,0,originOffset,150,origin.length);
hash=Murmur3.hash64(originOffset,150,origin.length);
assertEquals(305830725663368540L,hash);
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier
/**
 * Cross-checks Murmur3.hash32 against Guava's murmur3_32 for random
 * 8-byte (double) inputs.
 */
@Test public void testHashCodesM3_32_double(){
  final int seed = 123;
  Random rng = new Random(seed);
  HashFunction reference = Hashing.murmur3_32(seed);
  for (int iter = 0; iter < 1000; iter++) {
    byte[] input = ByteBuffer.allocate(8).putDouble(rng.nextDouble()).array();
    int expected = reference.hashBytes(input).asInt();
    assertEquals(expected, Murmur3.hash32(input, input.length, seed));
  }
}
Class: org.apache.hive.common.util.TestShutdownHookManager BooleanVerifier EqualityVerifier HybridVerifier
// Exercises the static ShutdownHookManager registry: add/remove/has and
// priority ordering (higher priority runs first, so it sorts earlier in
// getShutdownHooksInOrder()). The assertions are order-dependent because
// they mutate shared static state.
// NOTE(review): the initial size of 1 presumably reflects a hook installed
// elsewhere (e.g. by setup code) — confirm before changing.
@Test public void shutdownHookManager(){
Assert.assertEquals(1,ShutdownHookManager.getShutdownHooksInOrder().size());
// Two no-op hooks used purely as identity keys in the registry.
Runnable hook1=new Runnable(){
@Override public void run(){
}
}
;
Runnable hook2=new Runnable(){
@Override public void run(){
}
}
;
// Register hook1 at priority 0; it should be present and ordered first.
ShutdownHookManager.addShutdownHook(hook1,0);
Assert.assertTrue(ShutdownHookManager.hasShutdownHook(hook1));
Assert.assertEquals(2,ShutdownHookManager.getShutdownHooksInOrder().size());
Assert.assertEquals(hook1,ShutdownHookManager.getShutdownHooksInOrder().get(0));
// Removing the hook makes it unknown again.
ShutdownHookManager.removeShutdownHook(hook1);
Assert.assertFalse(ShutdownHookManager.hasShutdownHook(hook1));
// Re-adding restores the previous state.
ShutdownHookManager.addShutdownHook(hook1,0);
Assert.assertTrue(ShutdownHookManager.hasShutdownHook(hook1));
Assert.assertEquals(2,ShutdownHookManager.getShutdownHooksInOrder().size());
Assert.assertTrue(ShutdownHookManager.hasShutdownHook(hook1));
Assert.assertEquals(2,ShutdownHookManager.getShutdownHooksInOrder().size());
// hook2 at the higher priority 1 must sort before hook1 (priority 0).
ShutdownHookManager.addShutdownHook(hook2,1);
Assert.assertTrue(ShutdownHookManager.hasShutdownHook(hook1));
Assert.assertTrue(ShutdownHookManager.hasShutdownHook(hook2));
Assert.assertEquals(3,ShutdownHookManager.getShutdownHooksInOrder().size());
Assert.assertEquals(hook2,ShutdownHookManager.getShutdownHooksInOrder().get(0));
Assert.assertEquals(hook1,ShutdownHookManager.getShutdownHooksInOrder().get(1));
}
Class: org.apache.hive.hcatalog.api.TestHCatClient APIUtilityVerifier BranchVerifier UtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * End-to-end smoke test of basic DDL through HCatClient: create and list a
 * database, create tables with different storage formats (RCFile, text with
 * custom delimiters, ORC), and verify the resulting metadata. Also checks
 * that re-creating an existing table raises AlreadyExistsException.
 */
@Test public void testBasicDDLCommands() throws Exception {
  String db = "testdb";
  String tableOne = "testTable1";
  String tableTwo = "testTable2";
  String tableThree = "testTable3";
  HCatClient client = HCatClient.create(new Configuration(hcatConf));
  // Clean slate: drop the database (cascade) if a previous run left it behind.
  client.dropDatabase(db, true, HCatClient.DropDBMode.CASCADE);
  HCatCreateDBDesc dbDesc = HCatCreateDBDesc.create(db).ifNotExists(false).build();
  client.createDatabase(dbDesc);
  List dbNames = client.listDatabaseNamesByPattern("*");
  assertTrue(dbNames.contains("default"));
  assertTrue(dbNames.contains(db));
  // A freshly created database has no comment and no properties.
  HCatDatabase testDb = client.getDatabase(db);
  assertTrue(testDb.getComment() == null);
  assertTrue(testDb.getProperties().size() == 0);
  String warehouseDir = System.getProperty("test.warehouse.dir", "/user/hive/warehouse");
  if (useExternalMS) {
    assertTrue(testDb.getLocation().matches(".*" + "/" + db + ".db"));
  }
  else {
    // Local runs normalize "pfile:///" to "pfile:/" in the reported location.
    String expectedDir = warehouseDir.replaceFirst("pfile:///", "pfile:/");
    assertEquals(expectedDir + "/" + db + ".db", testDb.getLocation());
  }
  ArrayList cols = new ArrayList();
  cols.add(new HCatFieldSchema("id", Type.INT, "id comment"));
  cols.add(new HCatFieldSchema("value", Type.STRING, "value comment"));
  // Table 1: RCFile storage.
  HCatCreateTableDesc tableDesc = HCatCreateTableDesc.create(db, tableOne, cols).fileFormat("rcfile").build();
  client.createTable(tableDesc);
  HCatTable table1 = client.getTable(db, tableOne);
  assertTrue(table1.getInputFileFormat().equalsIgnoreCase(RCFileInputFormat.class.getName()));
  assertTrue(table1.getOutputFileFormat().equalsIgnoreCase(RCFileOutputFormat.class.getName()));
  assertTrue(table1.getSerdeLib().equalsIgnoreCase(LazyBinaryColumnarSerDe.class.getName()));
  assertTrue(table1.getCols().equals(cols));
  // Re-creating the same table must fail.
  try {
    client.createTable(tableDesc);
    fail("Expected exception");
  }
  catch ( HCatException e) {
    assertTrue(e.getMessage().contains("AlreadyExistsException while creating table."));
  }
  client.dropTable(db, tableOne, true);
  // Table 2: text storage with every custom delimiter set.
  HCatCreateTableDesc tableDesc2 = HCatCreateTableDesc.create(db, tableTwo, cols).fieldsTerminatedBy('\001').escapeChar('\002').linesTerminatedBy('\003').mapKeysTerminatedBy('\004').collectionItemsTerminatedBy('\005').nullDefinedAs('\006').build();
  client.createTable(tableDesc2);
  HCatTable table2 = client.getTable(db, tableTwo);
  assertTrue("Expected TextInputFormat, but got: " + table2.getInputFileFormat(), table2.getInputFileFormat().equalsIgnoreCase(TextInputFormat.class.getName()));
  assertTrue(table2.getOutputFileFormat().equalsIgnoreCase(HiveIgnoreKeyTextOutputFormat.class.getName()));
  assertTrue("SerdeParams not found", table2.getSerdeParams() != null);
  // Each delimiter must round-trip through the SerDe parameters.
  assertEquals("checking " + serdeConstants.FIELD_DELIM, Character.toString('\001'), table2.getSerdeParams().get(serdeConstants.FIELD_DELIM));
  assertEquals("checking " + serdeConstants.ESCAPE_CHAR, Character.toString('\002'), table2.getSerdeParams().get(serdeConstants.ESCAPE_CHAR));
  assertEquals("checking " + serdeConstants.LINE_DELIM, Character.toString('\003'), table2.getSerdeParams().get(serdeConstants.LINE_DELIM));
  assertEquals("checking " + serdeConstants.MAPKEY_DELIM, Character.toString('\004'), table2.getSerdeParams().get(serdeConstants.MAPKEY_DELIM));
  assertEquals("checking " + serdeConstants.COLLECTION_DELIM, Character.toString('\005'), table2.getSerdeParams().get(serdeConstants.COLLECTION_DELIM));
  assertEquals("checking " + serdeConstants.SERIALIZATION_NULL_FORMAT, Character.toString('\006'), table2.getSerdeParams().get(serdeConstants.SERIALIZATION_NULL_FORMAT));
  assertTrue(table2.getLocation().toLowerCase().matches(".*" + ("/" + db + ".db/" + tableTwo).toLowerCase()));
  // Table 3: ORC storage.
  HCatCreateTableDesc tableDesc3 = HCatCreateTableDesc.create(db, tableThree, cols).fileFormat("orcfile").build();
  client.createTable(tableDesc3);
  HCatTable table3 = client.getTable(db, tableThree);
  assertTrue(table3.getInputFileFormat().equalsIgnoreCase(OrcInputFormat.class.getName()));
  assertTrue(table3.getOutputFileFormat().equalsIgnoreCase(OrcOutputFormat.class.getName()));
  assertTrue(table3.getSerdeLib().equalsIgnoreCase(OrcSerde.class.getName()));
  // Fixed copy-paste bug: the original re-checked table1's columns here
  // (duplicating an earlier assertion); the ORC table's columns are what
  // this step is meant to verify.
  assertTrue(table3.getCols().equals(cols));
  client.close();
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies that partition columns supplied at table-creation time are
 * returned unchanged by HCatTable.getPartCols().
 */
@Test public void testPartitionSchema() throws Exception {
  try {
    HCatClient client = HCatClient.create(new Configuration(hcatConf));
    final String dbName = "myDb";
    final String tableName = "myTable";
    // Clean slate before creating the database and table.
    client.dropDatabase(dbName, true, HCatClient.DropDBMode.CASCADE);
    client.createDatabase(HCatCreateDBDesc.create(dbName).build());
    List columnSchema = Arrays.asList(new HCatFieldSchema("foo", Type.INT, ""), new HCatFieldSchema("bar", Type.STRING, ""));
    List partitionSchema = Arrays.asList(new HCatFieldSchema("dt", Type.STRING, ""), new HCatFieldSchema("grid", Type.STRING, ""));
    client.createTable(HCatCreateTableDesc.create(dbName, tableName, columnSchema).partCols(partitionSchema).build());
    HCatTable table = client.getTable(dbName, tableName);
    List partitionColumns = table.getPartCols();
    assertArrayEquals("Didn't get expected partition-schema back from the HCatTable.", partitionSchema.toArray(), partitionColumns.toArray());
    client.dropDatabase(dbName, false, HCatClient.DropDBMode.CASCADE);
  }
  catch ( Exception unexpected) {
    LOG.error("Unexpected exception!", unexpected);
    // fail() replaces the assertTrue(msg, false) anti-pattern for a clearer report.
    fail("Unexpected exception! " + unexpected.getMessage());
  }
}
IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
* Test that partition-definitions can be replicated between HCat-instances,
* independently of table-metadata replication, using PartitionSpec interfaces.
* (This is essentially the same test as testPartitionRegistrationWithCustomSchema(),
* transliterated to use the PartitionSpec APIs.)
* 2 identical tables are created on 2 different HCat instances ("source" and "target").
* On the source instance,
* 1. One partition is added with the old format ("TEXTFILE").
* 2. The table is updated with an additional column and the data-format changed to ORC.
* 3. Another partition is added with the new format.
* 4. The partitions' metadata is copied to the target HCat instance, without updating the target table definition.
* 5. The partitions' metadata is tested to be an exact replica of that on the source.
* @throws Exception
*/
@Test public void testPartitionSpecRegistrationWithCustomSchema() throws Exception {
try {
startReplicationTargetMetaStoreIfRequired();
// --- Phase 1: create the source database/table from a clean slate. ---
HCatClient sourceMetaStore=HCatClient.create(new Configuration(hcatConf));
final String dbName="myDb";
final String tableName="myTable";
sourceMetaStore.dropDatabase(dbName,true,HCatClient.DropDBMode.CASCADE);
sourceMetaStore.createDatabase(HCatCreateDBDesc.create(dbName).build());
List columnSchema=new ArrayList(Arrays.asList(new HCatFieldSchema("foo",Type.INT,""),new HCatFieldSchema("bar",Type.STRING,"")));
List partitionSchema=Arrays.asList(new HCatFieldSchema("dt",Type.STRING,""),new HCatFieldSchema("grid",Type.STRING,""));
HCatTable sourceTable=new HCatTable(dbName,tableName).cols(columnSchema).partCols(partitionSchema).comment("Source table.");
sourceMetaStore.createTable(HCatCreateTableDesc.create(sourceTable).build());
// Re-fetch so the table carries the metastore-assigned defaults.
sourceTable=sourceMetaStore.getTable(dbName,tableName);
assertNotNull("Table couldn't be queried for. ",sourceTable);
// --- Phase 2: add the first partition (table still in the old format). ---
Map partitionSpec_1=new HashMap();
partitionSpec_1.put("grid","AB");
partitionSpec_1.put("dt","2011_12_31");
HCatPartition sourcePartition_1=new HCatPartition(sourceTable,partitionSpec_1,makePartLocation(sourceTable,partitionSpec_1));
sourceMetaStore.addPartition(HCatAddPartitionDesc.create(sourcePartition_1).build());
assertEquals("Unexpected number of partitions. ",1,sourceMetaStore.getPartitions(dbName,tableName).size());
// The stored partition must inherit the table's schema/format/SerDe.
HCatPartition addedPartition_1=sourceMetaStore.getPartition(dbName,tableName,partitionSpec_1);
assertEquals("Column schema doesn't match.",sourceTable.getCols(),addedPartition_1.getColumns());
assertEquals("InputFormat doesn't match.",sourceTable.getInputFileFormat(),addedPartition_1.getInputFormat());
assertEquals("OutputFormat doesn't match.",sourceTable.getOutputFileFormat(),addedPartition_1.getOutputFormat());
assertEquals("SerDe doesn't match.",sourceTable.getSerdeLib(),addedPartition_1.getSerDe());
assertEquals("SerDe params don't match.",sourceTable.getSerdeParams(),addedPartition_1.getSerdeParams());
// --- Phase 3: replicate the table definition to the target metastore. ---
HCatClient targetMetaStore=HCatClient.create(new Configuration(replicationTargetHCatConf));
targetMetaStore.dropDatabase(dbName,true,HCatClient.DropDBMode.CASCADE);
targetMetaStore.createDatabase(HCatCreateDBDesc.create(dbName).build());
HCatTable targetTable=targetMetaStore.deserializeTable(sourceMetaStore.serializeTable(sourceTable));
targetMetaStore.createTable(HCatCreateTableDesc.create(targetTable).build());
targetTable=targetMetaStore.getTable(dbName,tableName);
assertEquals("Created table doesn't match the source.",HCatTable.NO_DIFF,targetTable.diff(sourceTable));
// --- Phase 4: evolve the source table (extra column, ORC format). ---
List newColumnSchema=new ArrayList(columnSchema);
newColumnSchema.add(new HCatFieldSchema("goo_new",Type.DOUBLE,""));
Map tableParams=new HashMap(1);
tableParams.put("orc.compress","ZLIB");
sourceMetaStore.cols2.equals(null);
sourceTable.cols(newColumnSchema).fileFormat("orcfile").tblProps(tableParams).serdeParam(serdeConstants.FIELD_DELIM,Character.toString('\001'));
sourceMetaStore.updateTableSchema(dbName,tableName,sourceTable);
sourceTable=sourceMetaStore.getTable(dbName,tableName);
// Second partition is created against the NEW table definition.
Map partitionSpec_2=new HashMap();
partitionSpec_2.put("grid","AB");
partitionSpec_2.put("dt","2012_01_01");
HCatPartition sourcePartition_2=new HCatPartition(sourceTable,partitionSpec_2,makePartLocation(sourceTable,partitionSpec_2));
sourceMetaStore.addPartition(HCatAddPartitionDesc.create(sourcePartition_2).build());
// --- Phase 5: copy partition metadata via PartitionSpec serialization. ---
HCatPartitionSpec sourcePartitionSpec=sourceMetaStore.getPartitionSpecs(dbName,tableName,-1);
assertEquals("Unexpected number of source partitions.",2,sourcePartitionSpec.size());
List partitionSpecString=sourceMetaStore.serializePartitionSpec(sourcePartitionSpec);
HCatPartitionSpec targetPartitionSpec=targetMetaStore.deserializePartitionSpec(partitionSpecString);
assertEquals("Could not add the expected number of partitions.",sourcePartitionSpec.size(),targetMetaStore.addPartitionSpec(targetPartitionSpec));
targetPartitionSpec=targetMetaStore.getPartitionSpecs(dbName,tableName,-1);
assertEquals("Could not retrieve the expected number of partitions.",sourcePartitionSpec.size(),targetPartitionSpec.size());
// Each replicated partition must be an exact metadata copy of the source.
HCatPartitionSpec.HCatPartitionIterator sourceIterator=sourcePartitionSpec.getPartitionIterator();
HCatPartitionSpec.HCatPartitionIterator targetIterator=targetPartitionSpec.getPartitionIterator();
while (targetIterator.hasNext()) {
assertTrue("Fewer target partitions than source.",sourceIterator.hasNext());
HCatPartition sourcePartition=sourceIterator.next();
HCatPartition targetPartition=targetIterator.next();
assertEquals("Column schema doesn't match.",sourcePartition.getColumns(),targetPartition.getColumns());
assertEquals("InputFormat doesn't match.",sourcePartition.getInputFormat(),targetPartition.getInputFormat());
assertEquals("OutputFormat doesn't match.",sourcePartition.getOutputFormat(),targetPartition.getOutputFormat());
assertEquals("SerDe doesn't match.",sourcePartition.getSerDe(),targetPartition.getSerDe());
assertEquals("SerDe params don't match.",sourcePartition.getSerdeParams(),targetPartition.getSerdeParams());
}
}
catch ( Exception unexpected) {
LOG.error("Unexpected exception! ",unexpected);
// NOTE(review): assertTrue(msg, false) would read better as fail(msg).
assertTrue("Unexpected exception! " + unexpected.getMessage(),false);
}
}
APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
* Test that partition-definitions can be replicated between HCat-instances,
* independently of table-metadata replication.
* 2 identical tables are created on 2 different HCat instances ("source" and "target").
* On the source instance,
* 1. One partition is added with the old format ("TEXTFILE").
* 2. The table is updated with an additional column and the data-format changed to ORC.
* 3. Another partition is added with the new format.
* 4. The partitions' metadata is copied to the target HCat instance, without updating the target table definition.
* 5. The partitions' metadata is tested to be an exact replica of that on the source.
* @throws Exception
*/
@Test public void testPartitionRegistrationWithCustomSchema() throws Exception {
try {
startReplicationTargetMetaStoreIfRequired();
HCatClient sourceMetaStore=HCatClient.create(new Configuration(hcatConf));
final String dbName="myDb";
final String tableName="myTable";
// Start from a clean slate on the source metastore.
sourceMetaStore.dropDatabase(dbName,true,HCatClient.DropDBMode.CASCADE);
sourceMetaStore.createDatabase(HCatCreateDBDesc.create(dbName).build());
// Mutable copy: a third column is appended later when the schema evolves.
List columnSchema=new ArrayList(Arrays.asList(new HCatFieldSchema("foo",Type.INT,""),new HCatFieldSchema("bar",Type.STRING,"")));
List partitionSchema=Arrays.asList(new HCatFieldSchema("dt",Type.STRING,""),new HCatFieldSchema("grid",Type.STRING,""));
HCatTable sourceTable=new HCatTable(dbName,tableName).cols(columnSchema).partCols(partitionSchema).comment("Source table.");
sourceMetaStore.createTable(HCatCreateTableDesc.create(sourceTable).build());
// Re-fetch, so that sourceTable carries the metastore-assigned defaults (formats, SerDe).
sourceTable=sourceMetaStore.getTable(dbName,tableName);
assertNotNull("Table couldn't be queried for. ",sourceTable);
// Step 1: add the first partition while the table still has the original format.
Map partitionSpec_1=new HashMap();
partitionSpec_1.put("grid","AB");
partitionSpec_1.put("dt","2011_12_31");
HCatPartition sourcePartition_1=new HCatPartition(sourceTable,partitionSpec_1,makePartLocation(sourceTable,partitionSpec_1));
sourceMetaStore.addPartition(HCatAddPartitionDesc.create(sourcePartition_1).build());
assertEquals("Unexpected number of partitions. ",1,sourceMetaStore.getPartitions(dbName,tableName).size());
// The first partition should have inherited the table's (pre-update) storage descriptor.
HCatPartition addedPartition_1=sourceMetaStore.getPartition(dbName,tableName,partitionSpec_1);
assertEquals("Column schema doesn't match.",sourceTable.getCols(),addedPartition_1.getColumns());
assertEquals("InputFormat doesn't match.",sourceTable.getInputFileFormat(),addedPartition_1.getInputFormat());
assertEquals("OutputFormat doesn't match.",sourceTable.getOutputFileFormat(),addedPartition_1.getOutputFormat());
assertEquals("SerDe doesn't match.",sourceTable.getSerdeLib(),addedPartition_1.getSerDe());
assertEquals("SerDe params don't match.",sourceTable.getSerdeParams(),addedPartition_1.getSerdeParams());
// Mirror the table definition on the target metastore via serialize/deserialize.
HCatClient targetMetaStore=HCatClient.create(new Configuration(replicationTargetHCatConf));
targetMetaStore.dropDatabase(dbName,true,HCatClient.DropDBMode.CASCADE);
targetMetaStore.createDatabase(HCatCreateDBDesc.create(dbName).build());
HCatTable targetTable=targetMetaStore.deserializeTable(sourceMetaStore.serializeTable(sourceTable));
targetMetaStore.createTable(HCatCreateTableDesc.create(targetTable).build());
targetTable=targetMetaStore.getTable(dbName,tableName);
assertEquals("Created table doesn't match the source.",HCatTable.NO_DIFF,targetTable.diff(sourceTable));
// Step 2: evolve the source table — extra column, ORC file-format, new SerDe settings.
List newColumnSchema=new ArrayList(columnSchema);
newColumnSchema.add(new HCatFieldSchema("goo_new",Type.DOUBLE,""));
Map tableParams=new HashMap(1);
tableParams.put("orc.compress","ZLIB");
sourceTable.cols(newColumnSchema).fileFormat("orcfile").tblProps(tableParams).serdeParam(serdeConstants.FIELD_DELIM,Character.toString('\001'));
sourceMetaStore.updateTableSchema(dbName,tableName,sourceTable);
sourceTable=sourceMetaStore.getTable(dbName,tableName);
// Step 3: add a second partition, which now picks up the new (ORC) format.
Map partitionSpec_2=new HashMap();
partitionSpec_2.put("grid","AB");
partitionSpec_2.put("dt","2012_01_01");
HCatPartition sourcePartition_2=new HCatPartition(sourceTable,partitionSpec_2,makePartLocation(sourceTable,partitionSpec_2));
sourceMetaStore.addPartition(HCatAddPartitionDesc.create(sourcePartition_2).build());
List sourcePartitions=sourceMetaStore.getPartitions(dbName,tableName);
assertEquals("Unexpected number of source partitions.",2,sourcePartitions.size());
// Step 4: copy both partitions to the target WITHOUT updating the target table definition.
List addPartitionDescs=new ArrayList(sourcePartitions.size());
for ( HCatPartition partition : sourcePartitions) {
addPartitionDescs.add(HCatAddPartitionDesc.create(partition).build());
}
targetMetaStore.addPartitions(addPartitionDescs);
List targetPartitions=targetMetaStore.getPartitions(dbName,tableName);
assertEquals("Expected the same number of partitions. ",sourcePartitions.size(),targetPartitions.size());
// Step 5: each target partition must be an exact replica of its source counterpart,
// including the per-partition storage descriptor (which differs between the two partitions).
for (int i=0; i < targetPartitions.size(); ++i) {
HCatPartition sourcePartition=sourcePartitions.get(i), targetPartition=targetPartitions.get(i);
assertEquals("Column schema doesn't match.",sourcePartition.getColumns(),targetPartition.getColumns());
assertEquals("InputFormat doesn't match.",sourcePartition.getInputFormat(),targetPartition.getInputFormat());
assertEquals("OutputFormat doesn't match.",sourcePartition.getOutputFormat(),targetPartition.getOutputFormat());
assertEquals("SerDe doesn't match.",sourcePartition.getSerDe(),targetPartition.getSerDe());
assertEquals("SerDe params don't match.",sourcePartition.getSerdeParams(),targetPartition.getSerdeParams());
}
}
catch ( Exception unexpected) {
// Any exception is a test failure; log it so the full stack-trace is preserved.
LOG.error("Unexpected exception! ",unexpected);
assertTrue("Unexpected exception! " + unexpected.getMessage(),false);
}
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Tests that getPartitions() with a partial partition-spec (only "dt" specified,
 * "grid" left unconstrained) returns exactly the partitions matching the
 * specified key, in order.
 * @throws Exception
 */
@Test public void testGetPartitionsWithPartialSpec() throws Exception {
try {
HCatClient client=HCatClient.create(new Configuration(hcatConf));
final String dbName="myDb";
final String tableName="myTable";
// Clean slate: drop and re-create the test database.
client.dropDatabase(dbName,true,HCatClient.DropDBMode.CASCADE);
client.createDatabase(HCatCreateDBDesc.create(dbName).build());
List columnSchema=Arrays.asList(new HCatFieldSchema("foo",Type.INT,""),new HCatFieldSchema("bar",Type.STRING,""));
List partitionSchema=Arrays.asList(new HCatFieldSchema("dt",Type.STRING,""),new HCatFieldSchema("grid",Type.STRING,""));
HCatTable table=new HCatTable(dbName,tableName).cols(columnSchema).partCols(partitionSchema);
client.createTable(HCatCreateTableDesc.create(table,false).build());
table=client.getTable(dbName,tableName);
assertNotNull("The created just now can't be null.",table);
// Add 4 partitions: one for 2011_12_31, three for 2012_01_01 (grids AB, OB, XB).
// NOTE(review): the same mutable map is re-put and re-used for every addPartition
// call; this appears to rely on HCatPartition copying the spec — confirm.
Map partitionSpec=new HashMap();
partitionSpec.put("grid","AB");
partitionSpec.put("dt","2011_12_31");
client.addPartition(HCatAddPartitionDesc.create(new HCatPartition(table,partitionSpec,makePartLocation(table,partitionSpec))).build());
partitionSpec.put("grid","AB");
partitionSpec.put("dt","2012_01_01");
client.addPartition(HCatAddPartitionDesc.create(new HCatPartition(table,partitionSpec,makePartLocation(table,partitionSpec))).build());
partitionSpec.put("dt","2012_01_01");
partitionSpec.put("grid","OB");
client.addPartition(HCatAddPartitionDesc.create(new HCatPartition(table,partitionSpec,makePartLocation(table,partitionSpec))).build());
partitionSpec.put("dt","2012_01_01");
partitionSpec.put("grid","XB");
client.addPartition(HCatAddPartitionDesc.create(new HCatPartition(table,partitionSpec,makePartLocation(table,partitionSpec))).build());
// Partial spec: constrain only "dt"; expect the three 2012_01_01 partitions back.
Map partialPartitionSpec=new HashMap();
partialPartitionSpec.put("dt","2012_01_01");
List partitions=client.getPartitions(dbName,tableName,partialPartitionSpec);
assertEquals("Unexpected number of partitions.",3,partitions.size());
assertArrayEquals("Mismatched partition.",new String[]{"2012_01_01","AB"},partitions.get(0).getValues().toArray());
assertArrayEquals("Mismatched partition.",new String[]{"2012_01_01","OB"},partitions.get(1).getValues().toArray());
assertArrayEquals("Mismatched partition.",new String[]{"2012_01_01","XB"},partitions.get(2).getValues().toArray());
client.dropDatabase(dbName,false,HCatClient.DropDBMode.CASCADE);
}
catch ( Exception unexpected) {
LOG.error("Unexpected exception!",unexpected);
assertTrue("Unexpected exception! " + unexpected.getMessage(),false);
}
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Tests that lookups of missing databases/tables/partitions raise
 * ObjectNotFoundException, while a malformed partition-spec (unknown key)
 * raises a plain HCatException that is NOT an ObjectNotFoundException.
 * @throws Exception
 */
@Test public void testObjectNotFoundException() throws Exception {
try {
HCatClient client=HCatClient.create(new Configuration(hcatConf));
String dbName="testObjectNotFoundException_DBName";
String tableName="testObjectNotFoundException_TableName";
client.dropDatabase(dbName,true,HCatClient.DropDBMode.CASCADE);
// Case 1: getDatabase() on a dropped database.
try {
client.getDatabase(dbName);
assertTrue("Expected ObjectNotFoundException.",false);
}
catch ( Exception exception) {
LOG.info("Got exception: ",exception);
assertTrue("Expected ObjectNotFoundException. Got:" + exception.getClass(),exception instanceof ObjectNotFoundException);
}
client.createDatabase(HCatCreateDBDesc.create(dbName).build());
// Case 2: getTable() on a table that was never created.
try {
client.getTable(dbName,tableName);
assertTrue("Expected ObjectNotFoundException.",false);
}
catch ( Exception exception) {
LOG.info("Got exception: ",exception);
assertTrue("Expected ObjectNotFoundException. Got:" + exception.getClass(),exception instanceof ObjectNotFoundException);
}
String partitionColumn="part";
List columns=Arrays.asList(new HCatFieldSchema("col",Type.STRING,""));
ArrayList partitionColumns=new ArrayList(Arrays.asList(new HCatFieldSchema(partitionColumn,Type.STRING,"")));
HCatTable table=new HCatTable(dbName,tableName).cols(columns).partCols(partitionColumns);
client.createTable(HCatCreateTableDesc.create(table,false).build());
HCatTable createdTable=client.getTable(dbName,tableName);
Map partitionSpec=new HashMap();
partitionSpec.put(partitionColumn,"foobar");
// Case 3: getPartition() before the partition exists.
try {
client.getPartition(dbName,tableName,partitionSpec);
assertTrue("Expected ObjectNotFoundException.",false);
}
catch ( Exception exception) {
LOG.info("Got exception: ",exception);
assertTrue("Expected ObjectNotFoundException. Got:" + exception.getClass(),exception instanceof ObjectNotFoundException);
}
client.addPartition(HCatAddPartitionDesc.create(new HCatPartition(createdTable,partitionSpec,makePartLocation(createdTable,partitionSpec))).build());
// A filter matching no partitions returns an empty list, not an exception.
assertEquals("Expected empty set of partitions.",0,client.listPartitionsByFilter(dbName,tableName,partitionColumn + " < 'foobar'").size());
// Case 4: a partition-spec with a bogus key is an invalid request, not a missing object.
try {
partitionSpec.put("NonExistentKey","foobar");
client.getPartition(dbName,tableName,partitionSpec);
assertTrue("Expected HCatException.",false);
}
catch ( Exception exception) {
LOG.info("Got exception: ",exception);
assertTrue("Expected HCatException. Got:" + exception.getClass(),exception instanceof HCatException);
assertFalse("Did not expect ObjectNotFoundException.",exception instanceof ObjectNotFoundException);
}
}
catch ( Throwable t) {
LOG.error("Unexpected exception!",t);
assertTrue("Unexpected exception! " + t.getMessage(),false);
}
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Tests client behavior across a failed createTable() call: the first attempt is
 * expected to fail with an HCatException, after which a freshly-created client
 * must still be able to create and query a table normally.
 * The finally-block asserts that the expected exception was indeed thrown.
 * @throws Exception
 */
@Test public void testTransportFailure() throws Exception {
HCatClient client=HCatClient.create(new Configuration(hcatConf));
boolean isExceptionCaught=false;
// Randomized table name, to avoid collisions across test runs.
final String tableName="Temptable" + new BigInteger(200,new Random()).toString(2);
ArrayList cols=new ArrayList();
cols.add(new HCatFieldSchema("id",Type.INT,"id columns"));
cols.add(new HCatFieldSchema("value",Type.STRING,"id columns"));
try {
HCatCreateTableDesc tableDesc=HCatCreateTableDesc.create(null,tableName,cols).fileFormat("rcfile").build();
client.createTable(tableDesc);
}
catch ( Exception exp) {
isExceptionCaught=true;
assertEquals("Unexpected exception type.",HCatException.class,exp.getClass());
// Recovery path: a new client must operate normally after the failure.
client=HCatClient.create(new Configuration(hcatConf));
String newName="goodTable";
client.dropTable(null,newName,true);
HCatCreateTableDesc tableDesc2=HCatCreateTableDesc.create(null,newName,cols).fileFormat("rcfile").build();
client.createTable(tableDesc2);
HCatTable newTable=client.getTable(null,newName);
assertNotNull("Newly created table couldn't be retrieved.",newTable);
assertTrue("Table name doesn't match.",newTable.getTableName().equalsIgnoreCase(newName));
}
 finally {
client.close();
assertTrue("The expected exception was never thrown.",isExceptionCaught);
}
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Tests that a table created with the HCAT_MSGBUS_TOPIC_NAME table-property
 * reports that same topic name through getMessageBusTopicName().
 * @throws Exception
 */
@Test public void testGetMessageBusTopicName() throws Exception {
  try {
    HCatClient metastoreClient = HCatClient.create(new Configuration(hcatConf));
    final String dbName = "testGetMessageBusTopicName_DBName";
    final String tableName = "testGetMessageBusTopicName_TableName";
    metastoreClient.dropDatabase(dbName, true, HCatClient.DropDBMode.CASCADE);
    metastoreClient.createDatabase(HCatCreateDBDesc.create(dbName).build());
    // The topic name is carried as an ordinary table property.
    final String topicName = "MY.topic.name";
    Map topicProps = new HashMap(1);
    topicProps.put(HCatConstants.HCAT_MSGBUS_TOPIC_NAME, topicName);
    metastoreClient.createTable(HCatCreateTableDesc.create(dbName, tableName, Arrays.asList(new HCatFieldSchema("foo", Type.STRING, ""))).tblProps(topicProps).build());
    assertEquals("MessageBus topic-name doesn't match!", topicName, metastoreClient.getMessageBusTopicName(dbName, tableName));
    // Cleanup.
    metastoreClient.dropDatabase(dbName, true, HCatClient.DropDBMode.CASCADE);
    metastoreClient.close();
  }
  catch (Exception exception) {
    LOG.error("Unexpected exception.", exception);
    assertTrue("Unexpected exception:" + exception.getMessage(), false);
  }
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Tests that updateTableSchema() replaces a table's column list wholesale,
 * and that the new columns are returned verbatim by a subsequent getTable().
 * @throws Exception
 */
@Test public void testUpdateTableSchema() throws Exception {
  try {
    HCatClient metastoreClient = HCatClient.create(new Configuration(hcatConf));
    final String dbName = "testUpdateTableSchema_DBName";
    final String tableName = "testUpdateTableSchema_TableName";
    metastoreClient.dropDatabase(dbName, true, HCatClient.DropDBMode.CASCADE);
    metastoreClient.createDatabase(HCatCreateDBDesc.create(dbName).build());
    // Create the table with an initial two-column schema.
    List oldSchema = Arrays.asList(new HCatFieldSchema("foo", Type.INT, ""), new HCatFieldSchema("bar", Type.STRING, ""));
    metastoreClient.createTable(HCatCreateTableDesc.create(dbName, tableName, oldSchema).build());
    // Replace it with an entirely different three-column schema.
    List newSchema = Arrays.asList(new HCatFieldSchema("completely", Type.DOUBLE, ""), new HCatFieldSchema("new", Type.STRING, ""), new HCatFieldSchema("fields", Type.STRING, ""));
    metastoreClient.updateTableSchema(dbName, tableName, newSchema);
    assertArrayEquals(newSchema.toArray(), metastoreClient.getTable(dbName, tableName).getCols().toArray());
    metastoreClient.dropDatabase(dbName, false, HCatClient.DropDBMode.CASCADE);
  }
  catch (Exception exception) {
    LOG.error("Unexpected exception.", exception);
    assertTrue("Unexpected exception: " + exception.getMessage(), false);
  }
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
* Test for detecting schema-changes for an HCatalog table, across 2 different HCat instances.
* A table is created with the same schema on 2 HCat instances. The table-schema is modified on the source HCat
* instance (columns, I/O formats, SerDe definitions, etc.). The table metadata is compared between source
* and target, the changes are detected and propagated to target.
* @throws Exception
*/
@Test public void testTableSchemaPropagation() throws Exception {
try {
startReplicationTargetMetaStoreIfRequired();
HCatClient sourceMetaStore=HCatClient.create(new Configuration(hcatConf));
final String dbName="myDb";
final String tableName="myTable";
// Clean slate on the source metastore.
sourceMetaStore.dropDatabase(dbName,true,HCatClient.DropDBMode.CASCADE);
sourceMetaStore.createDatabase(HCatCreateDBDesc.create(dbName).build());
List columnSchema=Arrays.asList(new HCatFieldSchema("foo",Type.INT,""),new HCatFieldSchema("bar",Type.STRING,""));
List partitionSchema=Arrays.asList(new HCatFieldSchema("dt",Type.STRING,""),new HCatFieldSchema("grid",Type.STRING,""));
HCatTable sourceTable=new HCatTable(dbName,tableName).cols(columnSchema).partCols(partitionSchema);
sourceMetaStore.createTable(HCatCreateTableDesc.create(sourceTable).build());
sourceTable=sourceMetaStore.getTable(dbName,tableName);
assertNotNull("Table couldn't be queried for. ",sourceTable);
// Replicate the table definition to the target via serialize/deserialize.
String tableStringRep=sourceMetaStore.serializeTable(sourceTable);
HCatClient targetMetaStore=HCatClient.create(new Configuration(replicationTargetHCatConf));
targetMetaStore.dropDatabase(dbName,true,HCatClient.DropDBMode.CASCADE);
targetMetaStore.createDatabase(HCatCreateDBDesc.create(dbName).build());
HCatTable targetTable=targetMetaStore.deserializeTable(tableStringRep);
// Round-tripping through the string representation must be lossless.
assertEquals("Table after deserialization should have been identical to sourceTable.",HCatTable.NO_DIFF,sourceTable.diff(targetTable));
targetMetaStore.createTable(HCatCreateTableDesc.create(targetTable).build());
targetTable=targetMetaStore.getTable(dbName,tableName);
assertEquals("Table after deserialization should have been identical to sourceTable.",HCatTable.NO_DIFF,sourceTable.diff(targetTable));
// Mutate the source: extra column, ORC file-format, new table/SerDe params.
List newColumnSchema=new ArrayList(columnSchema);
newColumnSchema.add(new HCatFieldSchema("goo_new",Type.DOUBLE,""));
Map tableParams=new HashMap(1);
tableParams.put("orc.compress","ZLIB");
sourceTable.cols(newColumnSchema).fileFormat("orcfile").tblProps(tableParams).serdeParam(serdeConstants.FIELD_DELIM,Character.toString('\001'));
sourceMetaStore.updateTableSchema(dbName,tableName,sourceTable);
sourceTable=sourceMetaStore.getTable(dbName,tableName);
// diff() must flag every attribute that changed on the source.
EnumSet diff=targetTable.diff(sourceTable);
assertTrue("Couldn't find change in column-schema.",diff.contains(HCatTable.TableAttribute.COLUMNS));
assertTrue("Couldn't find change in InputFormat.",diff.contains(HCatTable.TableAttribute.INPUT_FORMAT));
assertTrue("Couldn't find change in OutputFormat.",diff.contains(HCatTable.TableAttribute.OUTPUT_FORMAT));
assertTrue("Couldn't find change in SerDe.",diff.contains(HCatTable.TableAttribute.SERDE));
assertTrue("Couldn't find change in SerDe parameters.",diff.contains(HCatTable.TableAttribute.SERDE_PROPERTIES));
assertTrue("Couldn't find change in Table parameters.",diff.contains(HCatTable.TableAttribute.TABLE_PROPERTIES));
// Propagate the detected changes: resolve() applies the source's values for the diffed attributes.
targetMetaStore.updateTableSchema(dbName,tableName,targetTable.resolve(sourceTable,diff));
targetTable=targetMetaStore.getTable(dbName,tableName);
assertEquals("After propagating schema changes, source and target tables should have been equivalent.",HCatTable.NO_DIFF,targetTable.diff(sourceTable));
}
catch ( Exception unexpected) {
LOG.error("Unexpected exception!",unexpected);
assertTrue("Unexpected exception! " + unexpected.getMessage(),false);
}
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Tests that dropPartitions() with a partial partition-spec (only "dt")
 * removes every matching partition, leaving only the non-matching one, and
 * that the survivor still reports the full partition-column schema.
 * @throws Exception
 */
@Test public void testDropPartitionsWithPartialSpec() throws Exception {
try {
HCatClient client=HCatClient.create(new Configuration(hcatConf));
final String dbName="myDb";
final String tableName="myTable";
// Clean slate: drop and re-create the test database.
client.dropDatabase(dbName,true,HCatClient.DropDBMode.CASCADE);
client.createDatabase(HCatCreateDBDesc.create(dbName).build());
List columnSchema=Arrays.asList(new HCatFieldSchema("foo",Type.INT,""),new HCatFieldSchema("bar",Type.STRING,""));
List partitionSchema=Arrays.asList(new HCatFieldSchema("dt",Type.STRING,""),new HCatFieldSchema("grid",Type.STRING,""));
HCatTable table=new HCatTable(dbName,tableName).cols(columnSchema).partCols(partitionSchema);
client.createTable(HCatCreateTableDesc.create(table,false).build());
table=client.getTable(dbName,tableName);
assertNotNull("Table couldn't be queried for. ",table);
// Add 4 partitions: one for 2011_12_31, three for 2012_01_01 (grids AB, OB, XB).
// NOTE(review): the same mutable map is re-put and re-used for every addPartition
// call; this appears to rely on HCatPartition copying the spec — confirm.
Map partitionSpec=new HashMap();
partitionSpec.put("grid","AB");
partitionSpec.put("dt","2011_12_31");
client.addPartition(HCatAddPartitionDesc.create(new HCatPartition(table,partitionSpec,makePartLocation(table,partitionSpec))).build());
partitionSpec.put("grid","AB");
partitionSpec.put("dt","2012_01_01");
client.addPartition(HCatAddPartitionDesc.create(new HCatPartition(table,partitionSpec,makePartLocation(table,partitionSpec))).build());
partitionSpec.put("dt","2012_01_01");
partitionSpec.put("grid","OB");
client.addPartition(HCatAddPartitionDesc.create(new HCatPartition(table,partitionSpec,makePartLocation(table,partitionSpec))).build());
partitionSpec.put("dt","2012_01_01");
partitionSpec.put("grid","XB");
client.addPartition(HCatAddPartitionDesc.create(new HCatPartition(table,partitionSpec,makePartLocation(table,partitionSpec))).build());
// Drop all partitions matching dt=2012_01_01 (three of the four).
Map partialPartitionSpec=new HashMap();
partialPartitionSpec.put("dt","2012_01_01");
client.dropPartitions(dbName,tableName,partialPartitionSpec,true);
List partitions=client.getPartitions(dbName,tableName);
assertEquals("Unexpected number of partitions.",1,partitions.size());
assertArrayEquals("Mismatched partition.",new String[]{"2011_12_31","AB"},partitions.get(0).getValues().toArray());
// The surviving partition must still expose both partition columns, in order.
List partColumns=partitions.get(0).getPartColumns();
assertEquals(2,partColumns.size());
assertEquals("dt",partColumns.get(0).getName());
assertEquals("grid",partColumns.get(1).getName());
client.dropDatabase(dbName,false,HCatClient.DropDBMode.CASCADE);
}
catch ( Exception unexpected) {
LOG.error("Unexpected exception!",unexpected);
assertTrue("Unexpected exception! " + unexpected.getMessage(),false);
}
}
Class: org.apache.hive.hcatalog.api.TestHCatClientNotification APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies that dropping a table fires a DROP_TABLE notification event carrying
 * the expected event-id, timestamp, type, db/table names, and JSON message.
 * @throws Exception
 */
@Test public void dropTable() throws Exception {
  final String db = "default";
  final String tbl = "hcatdroptable";
  HCatTable doomedTable = new HCatTable(db, tbl);
  doomedTable.cols(Arrays.asList(new HCatFieldSchema("onecol", TypeInfoFactory.stringTypeInfo, "")));
  hCatClient.createTable(HCatCreateTableDesc.create(doomedTable).build());
  hCatClient.dropTable(db, tbl, false);
  // Two events expected: CREATE_TABLE first, then the DROP_TABLE under test.
  List notifications = hCatClient.getNextNotification(firstEventId, 0, null);
  assertEquals(2, notifications.size());
  HCatNotificationEvent dropEvent = notifications.get(1);
  assertEquals(firstEventId + 2, dropEvent.getEventId());
  assertTrue(dropEvent.getEventTime() >= startTime);
  assertEquals(HCatConstants.HCAT_DROP_TABLE_EVENT, dropEvent.getEventType());
  assertEquals(db, dropEvent.getDbName());
  assertEquals(tbl, dropEvent.getTableName());
  assertTrue(dropEvent.getMessage().matches("\\{\"eventType\":\"DROP_TABLE\",\"server\":\"\"," + "\"servicePrincipal\":\"\",\"db\":\"default\",\"table\":" + "\"hcatdroptable\",\"timestamp\":[0-9]+}"));
}
InternalCallVerifier EqualityVerifier
/**
 * Verifies that a NotificationFilter restricts getNextNotification() to
 * matching events only: of three generated events, just the single
 * DROP_DATABASE event is returned.
 * @throws Exception
 */
@Test public void filter() throws Exception {
  hCatClient.createDatabase(HCatCreateDBDesc.create("hcatf1").build());
  hCatClient.createDatabase(HCatCreateDBDesc.create("hcatf2").build());
  hCatClient.dropDatabase("hcatf2", false, HCatClient.DropDBMode.RESTRICT);
  // Accept only DROP_DATABASE events.
  IMetaStoreClient.NotificationFilter dropDbOnly = new IMetaStoreClient.NotificationFilter() {
    @Override
    public boolean accept(NotificationEvent candidate) {
      return candidate.getEventType().equals(HCatConstants.HCAT_DROP_DATABASE_EVENT);
    }
  };
  List matched = hCatClient.getNextNotification(firstEventId, 0, dropDbOnly);
  assertEquals(1, matched.size());
  // The drop was the third event generated after firstEventId.
  assertEquals(firstEventId + 3, matched.get(0).getEventId());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Verifies that dropping a database fires a DROP_DATABASE notification event
 * with the expected id, timestamp, type, db name (no table name), and JSON message.
 * @throws Exception
 */
@Test public void dropDatabase() throws Exception {
  final String db = "hcatdropdb";
  hCatClient.createDatabase(HCatCreateDBDesc.create(db).build());
  hCatClient.dropDatabase(db, false, HCatClient.DropDBMode.RESTRICT);
  // Two events expected: CREATE_DATABASE first, then the DROP_DATABASE under test.
  List notifications = hCatClient.getNextNotification(firstEventId, 0, null);
  assertEquals(2, notifications.size());
  HCatNotificationEvent dropEvent = notifications.get(1);
  assertEquals(firstEventId + 2, dropEvent.getEventId());
  assertTrue(dropEvent.getEventTime() >= startTime);
  assertEquals(HCatConstants.HCAT_DROP_DATABASE_EVENT, dropEvent.getEventType());
  assertEquals(db, dropEvent.getDbName());
  // Database-level events carry no table name.
  assertNull(dropEvent.getTableName());
  assertTrue(dropEvent.getMessage().matches("\\{\"eventType\":\"DROP_DATABASE\",\"server\":\"\"," + "\"servicePrincipal\":\"\",\"db\":\"hcatdropdb\",\"timestamp\":[0-9]+}"));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies that adding a partition fires an ADD_PARTITION notification event
 * whose JSON message lists the partition key/value that was added.
 * @throws Exception
 */
@Test public void addPartition() throws Exception {
  final String db = "default";
  final String tbl = "hcataddparttable";
  final String partCol = "pc";
  HCatTable partitionedTable = new HCatTable(db, tbl);
  partitionedTable.partCol(new HCatFieldSchema(partCol, TypeInfoFactory.stringTypeInfo, ""));
  partitionedTable.cols(Arrays.asList(new HCatFieldSchema("onecol", TypeInfoFactory.stringTypeInfo, "")));
  hCatClient.createTable(HCatCreateTableDesc.create(partitionedTable).build());
  final String partValue = "testpart";
  Map spec = new HashMap(1);
  spec.put(partCol, partValue);
  hCatClient.addPartition(HCatAddPartitionDesc.create(new HCatPartition(partitionedTable, spec, null)).build());
  // Two events expected: CREATE_TABLE first, then the ADD_PARTITION under test.
  List notifications = hCatClient.getNextNotification(firstEventId, 0, null);
  assertEquals(2, notifications.size());
  HCatNotificationEvent addEvent = notifications.get(1);
  assertEquals(firstEventId + 2, addEvent.getEventId());
  assertTrue(addEvent.getEventTime() >= startTime);
  assertEquals(HCatConstants.HCAT_ADD_PARTITION_EVENT, addEvent.getEventType());
  assertEquals("default", addEvent.getDbName());
  assertEquals(tbl, addEvent.getTableName());
  assertTrue(addEvent.getMessage().matches("\\{\"eventType\":\"ADD_PARTITION\",\"server\":\"\"," + "\"servicePrincipal\":\"\",\"db\":\"default\",\"table\":" + "\"hcataddparttable\",\"timestamp\":[0-9]+,\"partitions\":\\[\\{\"pc\":\"testpart\"}]}"));
}
InternalCallVerifier EqualityVerifier
/**
 * Verifies that getNextNotification() honors its maxEvents cap: three events
 * are generated, but only the first two are returned when the cap is 2.
 * @throws Exception
 */
@Test public void getOnlyMaxEvents() throws Exception {
  hCatClient.createDatabase(HCatCreateDBDesc.create("hcatdb1").build());
  hCatClient.createDatabase(HCatCreateDBDesc.create("hcatdb2").build());
  hCatClient.createDatabase(HCatCreateDBDesc.create("hcatdb3").build());
  List cappedEvents = hCatClient.getNextNotification(firstEventId, 2, null);
  assertEquals(2, cappedEvents.size());
  assertEquals(firstEventId + 1, cappedEvents.get(0).getEventId());
  assertEquals(firstEventId + 2, cappedEvents.get(1).getEventId());
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies that creating a table fires a CREATE_TABLE notification event
 * with the expected id, timestamp, type, db/table names, and JSON message.
 * @throws Exception
 */
@Test public void createTable() throws Exception {
  final String db = "default";
  final String tbl = "hcatcreatetable";
  HCatTable newTable = new HCatTable(db, tbl);
  newTable.cols(Arrays.asList(new HCatFieldSchema("onecol", TypeInfoFactory.stringTypeInfo, "")));
  hCatClient.createTable(HCatCreateTableDesc.create(newTable).build());
  List notifications = hCatClient.getNextNotification(firstEventId, 0, null);
  assertEquals(1, notifications.size());
  HCatNotificationEvent createEvent = notifications.get(0);
  assertEquals(firstEventId + 1, createEvent.getEventId());
  assertTrue(createEvent.getEventTime() >= startTime);
  assertEquals(HCatConstants.HCAT_CREATE_TABLE_EVENT, createEvent.getEventType());
  assertEquals(db, createEvent.getDbName());
  assertEquals("hcatcreatetable", createEvent.getTableName());
  assertTrue(createEvent.getMessage().matches("\\{\"eventType\":\"CREATE_TABLE\",\"server\":\"\"," + "\"servicePrincipal\":\"\",\"db\":\"default\",\"table\":\"hcatcreatetable\",\"timestamp\":[0-9]+}"));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Verifies that creating a database fires a CREATE_DATABASE notification event
 * with the expected id, timestamp, type, db name (no table name), and JSON message.
 * @throws Exception
 */
@Test public void createDatabase() throws Exception {
  hCatClient.createDatabase(HCatCreateDBDesc.create("myhcatdb").build());
  List notifications = hCatClient.getNextNotification(firstEventId, 0, null);
  assertEquals(1, notifications.size());
  HCatNotificationEvent createEvent = notifications.get(0);
  assertEquals(firstEventId + 1, createEvent.getEventId());
  assertTrue(createEvent.getEventTime() >= startTime);
  assertEquals(HCatConstants.HCAT_CREATE_DATABASE_EVENT, createEvent.getEventType());
  assertEquals("myhcatdb", createEvent.getDbName());
  // Database-level events carry no table name.
  assertNull(createEvent.getTableName());
  assertTrue(createEvent.getMessage().matches("\\{\"eventType\":\"CREATE_DATABASE\",\"server\":\"\"," + "\"servicePrincipal\":\"\",\"db\":\"myhcatdb\",\"timestamp\":[0-9]+}"));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies that dropping a partition fires a DROP_PARTITION notification event
 * whose JSON message lists the partition key/value that was dropped.
 * @throws Exception
 */
@Test public void dropPartition() throws Exception {
  final String db = "default";
  final String tbl = "hcatdropparttable";
  final String partCol = "pc";
  HCatTable partitionedTable = new HCatTable(db, tbl);
  partitionedTable.partCol(new HCatFieldSchema(partCol, TypeInfoFactory.stringTypeInfo, ""));
  partitionedTable.cols(Arrays.asList(new HCatFieldSchema("onecol", TypeInfoFactory.stringTypeInfo, "")));
  hCatClient.createTable(HCatCreateTableDesc.create(partitionedTable).build());
  final String partValue = "testpart";
  Map spec = new HashMap(1);
  spec.put(partCol, partValue);
  hCatClient.addPartition(HCatAddPartitionDesc.create(new HCatPartition(partitionedTable, spec, null)).build());
  hCatClient.dropPartitions(db, tbl, spec, false);
  // Three events expected: CREATE_TABLE, ADD_PARTITION, then the DROP_PARTITION under test.
  List notifications = hCatClient.getNextNotification(firstEventId, 0, null);
  assertEquals(3, notifications.size());
  HCatNotificationEvent dropEvent = notifications.get(2);
  assertEquals(firstEventId + 3, dropEvent.getEventId());
  assertTrue(dropEvent.getEventTime() >= startTime);
  assertEquals(HCatConstants.HCAT_DROP_PARTITION_EVENT, dropEvent.getEventType());
  assertEquals("default", dropEvent.getDbName());
  assertEquals(tbl, dropEvent.getTableName());
  assertTrue(dropEvent.getMessage().matches("\\{\"eventType\":\"DROP_PARTITION\",\"server\":\"\"," + "\"servicePrincipal\":\"\",\"db\":\"default\",\"table\":" + "\"hcatdropparttable\",\"timestamp\":[0-9]+,\"partitions\":\\[\\{\"pc\":\"testpart\"}]}"));
}
InternalCallVerifier EqualityVerifier
/**
 * Verifies that a NotificationFilter combines correctly with the maxEvents cap:
 * of two matching CREATE_DATABASE events, only the first is returned when the
 * cap is 1.
 * @throws Exception
 */
@Test public void filterWithMax() throws Exception {
  hCatClient.createDatabase(HCatCreateDBDesc.create("hcatm1").build());
  hCatClient.createDatabase(HCatCreateDBDesc.create("hcatm2").build());
  hCatClient.dropDatabase("hcatm2", false, HCatClient.DropDBMode.RESTRICT);
  // Accept only CREATE_DATABASE events.
  IMetaStoreClient.NotificationFilter createDbOnly = new IMetaStoreClient.NotificationFilter() {
    @Override
    public boolean accept(NotificationEvent candidate) {
      return candidate.getEventType().equals(HCatConstants.HCAT_CREATE_DATABASE_EVENT);
    }
  };
  List matched = hCatClient.getNextNotification(firstEventId, 1, createDbOnly);
  assertEquals(1, matched.size());
  assertEquals(firstEventId + 1, matched.get(0).getEventId());
}
Class: org.apache.hive.hcatalog.api.repl.commands.TestCommands APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * End-to-end test of the basic replication export/import commands: a table is
 * loaded with two rows, exported (checking the "_metadata" file for
 * "repl.scope":"all" and "repl.last.id"), then imported under a new table
 * name, and the imported data and table-properties are verified.
 * @throws IOException
 * @throws CommandNeedRetryException
 */
@Test public void testBasicReplEximCommands() throws IOException, CommandNeedRetryException {
int evid=111;
String exportLocation=TEST_PATH + File.separator + "testBasicReplExim";
Path tempPath=new Path(TEST_PATH,"testBasicReplEximTmp");
String tempLocation=tempPath.toUri().getPath();
String dbName="exim";
String tableName="basicSrc";
String importedTableName="basicDst";
List cols=HCatSchemaUtils.getHCatSchema("b:string").getFields();
// Clean slate: drop and re-create the test database and source table.
client.dropDatabase(dbName,true,HCatClient.DropDBMode.CASCADE);
client.createDatabase(HCatCreateDBDesc.create(dbName).ifNotExists(false).build());
HCatTable table=(new HCatTable(dbName,tableName)).cols(cols).fileFormat("textfile");
client.createTable(HCatCreateTableDesc.create(table).build());
HCatTable t=client.getTable(dbName,tableName);
assertNotNull(t);
// Load two rows into the source table and verify they're readable.
String[] data=new String[]{"eleven","twelve"};
HcatTestUtils.createTestDataFile(tempLocation,data);
CommandProcessorResponse ret=driver.run("LOAD DATA LOCAL INPATH '" + tempLocation + "' OVERWRITE INTO TABLE "+ dbName+ "."+ tableName);
assertEquals(ret.getResponseCode() + ":" + ret.getErrorMessage(),null,ret.getException());
CommandProcessorResponse selectRet=driver.run("SELECT * from " + dbName + "."+ tableName);
assertEquals(selectRet.getResponseCode() + ":" + selectRet.getErrorMessage(),null,selectRet.getException());
List values=new ArrayList();
driver.getResults(values);
assertEquals(2,values.size());
assertEquals(data[0],values.get(0));
assertEquals(data[1],values.get(1));
// Run the replication export and inspect the generated _metadata file.
ExportCommand exportCmd=new ExportCommand(dbName,tableName,null,exportLocation,false,evid);
LOG.info("About to run :" + exportCmd.get().get(0));
CommandProcessorResponse ret2=driver.run(exportCmd.get().get(0));
assertEquals(ret2.getResponseCode() + ":" + ret2.getErrorMessage(),null,ret2.getException());
List exportPaths=exportCmd.cleanupLocationsAfterEvent();
assertEquals(1,exportPaths.size());
String metadata=getMetadataContents(exportPaths.get(0));
LOG.info("Export returned the following _metadata contents:");
LOG.info(metadata);
// A non-noop replication export must mark full scope and record the last event-id.
assertTrue(metadata + "did not match \"repl.scope\"=\"all\"",metadata.matches(".*\"repl.scope\":\"all\".*"));
assertTrue(metadata + "has \"repl.last.id\"",metadata.matches(".*\"repl.last.id\":.*"));
// Import into a differently-named table and verify the data round-tripped.
ImportCommand importCmd=new ImportCommand(dbName,importedTableName,null,exportLocation,false,evid);
LOG.info("About to run :" + importCmd.get().get(0));
CommandProcessorResponse ret3=driver.run(importCmd.get().get(0));
assertEquals(ret3.getResponseCode() + ":" + ret3.getErrorMessage(),null,ret3.getException());
CommandProcessorResponse selectRet2=driver.run("SELECT * from " + dbName + "."+ importedTableName);
assertEquals(selectRet2.getResponseCode() + ":" + selectRet2.getErrorMessage(),null,selectRet2.getException());
List values2=new ArrayList();
driver.getResults(values2);
assertEquals(2,values2.size());
assertEquals(data[0],values2.get(0));
assertEquals(data[1],values2.get(1));
// The imported table must carry the replication watermark property.
HCatTable importedTable=client.getTable(dbName,importedTableName);
assertNotNull(importedTable);
assertTrue(importedTable.getTblProps().containsKey("repl.last.id"));
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Tests that export/import commands against a non-existent database/table
 * behave as no-ops: the export still succeeds but marks its _metadata with
 * "repl.noop":"true", and the subsequent import does not create the missing
 * database.
 * @throws CommandNeedRetryException
 * @throws IOException
 */
@Test public void testNoopReplEximCommands() throws CommandNeedRetryException, IOException {
int evid=333;
String exportLocation=TEST_PATH + File.separator + "testNoopReplExim";
// Names salted with the current time, so they cannot pre-exist.
String dbName="doesNotExist" + System.currentTimeMillis();
String tableName="nope" + System.currentTimeMillis();
ExportCommand noopExportCmd=new ExportCommand(dbName,tableName,null,exportLocation,false,evid);
LOG.info("About to run :" + noopExportCmd.get().get(0));
CommandProcessorResponse ret=driver.run(noopExportCmd.get().get(0));
assertEquals(ret.getResponseCode() + ":" + ret.getErrorMessage(),null,ret.getException());
List exportPaths=noopExportCmd.cleanupLocationsAfterEvent();
assertEquals(1,exportPaths.size());
String metadata=getMetadataContents(exportPaths.get(0));
LOG.info("Export returned the following _metadata contents:");
LOG.info(metadata);
// Exporting a missing table must be flagged as a no-op in the metadata.
assertTrue(metadata + "did not match \"repl.noop\"=\"true\"",metadata.matches(".*\"repl.noop\":\"true\".*"));
ImportCommand noopImportCmd=new ImportCommand(dbName,tableName,null,exportLocation,false,evid);
LOG.info("About to run :" + noopImportCmd.get().get(0));
CommandProcessorResponse ret2=driver.run(noopImportCmd.get().get(0));
assertEquals(ret2.getResponseCode() + ":" + ret2.getErrorMessage(),null,ret2.getException());
// The no-op import must not have created the database; only the thrown
// exception matters, so the (unused) return value is discarded.
Exception onfe=null;
try {
client.getDatabase(dbName);
}
 catch ( Exception e) {
onfe=e;
}
assertNotNull("Expected an exception when querying the missing database.",onfe);
assertTrue("Expected ObjectNotFoundException. Got:" + onfe.getClass(),onfe instanceof ObjectNotFoundException);
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// Verifies DropTableCommand semantics: command metadata (event id, single
// statement, retriable, not undoable), serialization round-trip, and the
// replication guard -- a replicated drop is skipped when the table's
// repl.last.id is NEWER than the command's event id, but proceeds when it is
// older; a non-replicated drop always drops.
@Test public void testDropTableCommand() throws HCatException, CommandNeedRetryException {
String dbName="cmd_testdb";
String tableName="cmd_testtable";
int evid=789;
List cols=HCatSchemaUtils.getHCatSchema("a:int,b:string").getFields();
Command testReplicatedDropCmd=new DropTableCommand(dbName,tableName,true,evid);
assertEquals(evid,testReplicatedDropCmd.getEventId());
assertEquals(1,testReplicatedDropCmd.get().size());
assertEquals(true,testReplicatedDropCmd.isRetriable());
assertEquals(false,testReplicatedDropCmd.isUndoable());
CommandTestUtils.testCommandSerialization(testReplicatedDropCmd);
Command testNormalDropCmd=new DropTableCommand(dbName,tableName,false,evid);
assertEquals(evid,testNormalDropCmd.getEventId());
assertEquals(1,testNormalDropCmd.get().size());
assertEquals(true,testNormalDropCmd.isRetriable());
assertEquals(false,testNormalDropCmd.isUndoable());
CommandTestUtils.testCommandSerialization(testNormalDropCmd);
client.dropDatabase(dbName,true,HCatClient.DropDBMode.CASCADE);
client.createDatabase(HCatCreateDBDesc.create(dbName).ifNotExists(false).build());
Map tprops=new HashMap();
// repl.last.id = evid + 5: the table's state is AHEAD of the drop event.
tprops.put(ReplicationUtils.REPL_STATE_ID,String.valueOf(evid + 5));
HCatTable tableToCreate=(new HCatTable(dbName,tableName)).tblProps(tprops).cols(cols);
client.createTable(HCatCreateTableDesc.create(tableToCreate).build());
HCatTable t1=client.getTable(dbName,tableName);
assertNotNull(t1);
LOG.info("About to run :" + testReplicatedDropCmd.get().get(0));
driver.run(testReplicatedDropCmd.get().get(0));
// Replicated drop against a newer table is a no-op; table must survive.
HCatTable t2=client.getTable(dbName,tableName);
assertNotNull(t2);
LOG.info("About to run :" + testNormalDropCmd.get().get(0));
driver.run(testNormalDropCmd.get().get(0));
// Normal (non-replicated) drop ignores the repl state id and must drop.
Exception onfe=null;
try {
HCatTable t_del=client.getTable(dbName,tableName);
}
catch ( Exception e) {
onfe=e;
}
assertNotNull(onfe);
assertTrue(onfe instanceof ObjectNotFoundException);
Map tprops2=new HashMap();
// repl.last.id = evid - 5: the table's state is BEHIND the drop event.
tprops2.put(ReplicationUtils.REPL_STATE_ID,String.valueOf(evid - 5));
HCatTable tableToCreate2=(new HCatTable(dbName,tableName)).tblProps(tprops2).cols(cols);
client.createTable(HCatCreateTableDesc.create(tableToCreate2).build());
HCatTable t3=client.getTable(dbName,tableName);
assertNotNull(t3);
LOG.info("About to run :" + testReplicatedDropCmd.get().get(0));
driver.run(testReplicatedDropCmd.get().get(0));
// Replicated drop against an older table must go through.
Exception onfe2=null;
try {
HCatTable t_del=client.getTable(dbName,tableName);
}
catch ( Exception e) {
onfe2=e;
}
assertNotNull(onfe2);
assertTrue(onfe2 instanceof ObjectNotFoundException);
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// Verifies metadata-only replication export/import: the export is tagged with
// repl.scope=metadata and repl.last.id, and the imported table exists with the
// replication property set but carries NO data rows.
@Test public void testMetadataReplEximCommands() throws IOException, CommandNeedRetryException {
int evid=222;
String exportLocation=TEST_PATH + File.separator + "testMetadataReplExim";
Path tempPath=new Path(TEST_PATH,"testMetadataReplEximTmp");
String tempLocation=tempPath.toUri().getPath();
String dbName="exim";
String tableName="basicSrc";
String importedTableName="basicDst";
List cols=HCatSchemaUtils.getHCatSchema("b:string").getFields();
client.dropDatabase(dbName,true,HCatClient.DropDBMode.CASCADE);
client.createDatabase(HCatCreateDBDesc.create(dbName).ifNotExists(false).build());
HCatTable table=(new HCatTable(dbName,tableName)).cols(cols).fileFormat("textfile");
client.createTable(HCatCreateTableDesc.create(table).build());
HCatTable t=client.getTable(dbName,tableName);
assertNotNull(t);
// Seed the source table with two rows so we can prove they are NOT copied.
String[] data=new String[]{"eleven","twelve"};
HcatTestUtils.createTestDataFile(tempLocation,data);
CommandProcessorResponse ret=driver.run("LOAD DATA LOCAL INPATH '" + tempLocation + "' OVERWRITE INTO TABLE "+ dbName+ "."+ tableName);
assertEquals(ret.getResponseCode() + ":" + ret.getErrorMessage(),null,ret.getException());
CommandProcessorResponse selectRet=driver.run("SELECT * from " + dbName + "."+ tableName);
assertEquals(selectRet.getResponseCode() + ":" + selectRet.getErrorMessage(),null,selectRet.getException());
List values=new ArrayList();
driver.getResults(values);
assertEquals(2,values.size());
assertEquals(data[0],values.get(0));
assertEquals(data[1],values.get(1));
// metadataOnly=true (5th ctor arg) selects a metadata-scoped export.
ExportCommand exportMdCmd=new ExportCommand(dbName,tableName,null,exportLocation,true,evid);
LOG.info("About to run :" + exportMdCmd.get().get(0));
CommandProcessorResponse ret2=driver.run(exportMdCmd.get().get(0));
assertEquals(ret2.getResponseCode() + ":" + ret2.getErrorMessage(),null,ret2.getException());
List exportPaths=exportMdCmd.cleanupLocationsAfterEvent();
assertEquals(1,exportPaths.size());
String metadata=getMetadataContents(exportPaths.get(0));
LOG.info("Export returned the following _metadata contents:");
LOG.info(metadata);
assertTrue(metadata + "did not match \"repl.scope\"=\"metadata\"",metadata.matches(".*\"repl.scope\":\"metadata\".*"));
assertTrue(metadata + "has \"repl.last.id\"",metadata.matches(".*\"repl.last.id\":.*"));
ImportCommand importMdCmd=new ImportCommand(dbName,importedTableName,null,exportLocation,true,evid);
LOG.info("About to run :" + importMdCmd.get().get(0));
CommandProcessorResponse ret3=driver.run(importMdCmd.get().get(0));
assertEquals(ret3.getResponseCode() + ":" + ret3.getErrorMessage(),null,ret3.getException());
CommandProcessorResponse selectRet2=driver.run("SELECT * from " + dbName + "."+ importedTableName);
assertEquals(selectRet2.getResponseCode() + ":" + selectRet2.getErrorMessage(),null,selectRet2.getException());
List values2=new ArrayList();
driver.getResults(values2);
// Metadata-only import: schema arrives, data does not.
assertEquals(0,values2.size());
HCatTable importedTable=client.getTable(dbName,importedTableName);
assertNotNull(importedTable);
assertTrue(importedTable.getTblProps().containsKey("repl.last.id"));
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// Verifies DropDatabaseCommand: command metadata (event id, single statement,
// retriable, not undoable), serialization round-trip, and that running the
// generated statement actually removes the database.
@Test public void testDropDatabaseCommand() throws HCatException, CommandNeedRetryException {
String dbName="cmd_testdb";
int evid=999;
Command testCmd=new DropDatabaseCommand(dbName,evid);
assertEquals(evid,testCmd.getEventId());
assertEquals(1,testCmd.get().size());
assertEquals(true,testCmd.isRetriable());
assertEquals(false,testCmd.isUndoable());
CommandTestUtils.testCommandSerialization(testCmd);
// Recreate a clean database, then drop it via the command's SQL.
client.dropDatabase(dbName,true,HCatClient.DropDBMode.CASCADE);
client.createDatabase(HCatCreateDBDesc.create(dbName).ifNotExists(false).build());
HCatDatabase db=client.getDatabase(dbName);
assertNotNull(db);
LOG.info("About to run :" + testCmd.get().get(0));
driver.run(testCmd.get().get(0));
// Lookup after the drop must fail with ObjectNotFoundException.
Exception onfe=null;
try {
HCatDatabase db_del=client.getDatabase(dbName);
}
catch ( Exception e) {
onfe=e;
}
assertNotNull(onfe);
assertTrue(onfe instanceof ObjectNotFoundException);
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// Verifies DropPartitionCommand semantics, mirroring testDropTableCommand at
// partition granularity: command metadata and serialization, plus the
// replication guard -- a replicated drop is skipped when the partition's
// repl.last.id is newer than the command's event id, but proceeds when it is
// older; a non-replicated drop always drops.
@Test public void testDropPartitionCommand() throws HCatException, CommandNeedRetryException, MetaException {
String dbName="cmd_testdb";
String tableName="cmd_testtable";
int evid=789;
List pcols=HCatSchemaUtils.getHCatSchema("b:string").getFields();
List cols=HCatSchemaUtils.getHCatSchema("a:int").getFields();
Map ptnDesc=new HashMap();
ptnDesc.put("b","test");
Command testReplicatedDropPtnCmd=new DropPartitionCommand(dbName,tableName,ptnDesc,true,evid);
assertEquals(evid,testReplicatedDropPtnCmd.getEventId());
assertEquals(1,testReplicatedDropPtnCmd.get().size());
assertEquals(true,testReplicatedDropPtnCmd.isRetriable());
assertEquals(false,testReplicatedDropPtnCmd.isUndoable());
CommandTestUtils.testCommandSerialization(testReplicatedDropPtnCmd);
Command testNormalDropPtnCmd=new DropPartitionCommand(dbName,tableName,ptnDesc,false,evid);
assertEquals(evid,testNormalDropPtnCmd.getEventId());
assertEquals(1,testNormalDropPtnCmd.get().size());
assertEquals(true,testNormalDropPtnCmd.isRetriable());
assertEquals(false,testNormalDropPtnCmd.isUndoable());
CommandTestUtils.testCommandSerialization(testNormalDropPtnCmd);
client.dropDatabase(dbName,true,HCatClient.DropDBMode.CASCADE);
client.createDatabase(HCatCreateDBDesc.create(dbName).ifNotExists(false).build());
Map props=new HashMap();
// repl.last.id = evid + 5: the partition's state is AHEAD of the drop event.
props.put(ReplicationUtils.REPL_STATE_ID,String.valueOf(evid + 5));
HCatTable table=(new HCatTable(dbName,tableName)).tblProps(props).cols(cols).partCols(pcols);
client.createTable(HCatCreateTableDesc.create(table).build());
HCatTable tableCreated=client.getTable(dbName,tableName);
assertNotNull(tableCreated);
HCatPartition ptnToAdd=(new HCatPartition(tableCreated,ptnDesc,TestHCatClient.makePartLocation(tableCreated,ptnDesc))).parameters(props);
client.addPartition(HCatAddPartitionDesc.create(ptnToAdd).build());
HCatPartition p1=client.getPartition(dbName,tableName,ptnDesc);
assertNotNull(p1);
LOG.info("About to run :" + testReplicatedDropPtnCmd.get().get(0));
driver.run(testReplicatedDropPtnCmd.get().get(0));
// Replicated drop against a newer partition is a no-op; partition survives.
HCatPartition p2=client.getPartition(dbName,tableName,ptnDesc);
assertNotNull(p2);
LOG.info("About to run :" + testNormalDropPtnCmd.get().get(0));
driver.run(testNormalDropPtnCmd.get().get(0));
// Normal drop ignores the repl state id and must remove the partition.
Exception onfe=null;
try {
HCatPartition p_del=client.getPartition(dbName,tableName,ptnDesc);
}
catch ( Exception e) {
onfe=e;
}
assertNotNull(onfe);
assertTrue(onfe instanceof ObjectNotFoundException);
Map props2=new HashMap();
// repl.last.id = evid - 5: the partition's state is BEHIND the drop event.
props2.put(ReplicationUtils.REPL_STATE_ID,String.valueOf(evid - 5));
HCatPartition ptnToAdd2=(new HCatPartition(tableCreated,ptnDesc,TestHCatClient.makePartLocation(tableCreated,ptnDesc))).parameters(props2);
client.addPartition(HCatAddPartitionDesc.create(ptnToAdd2).build());
HCatPartition p3=client.getPartition(dbName,tableName,ptnDesc);
assertNotNull(p3);
LOG.info("About to run :" + testReplicatedDropPtnCmd.get().get(0));
driver.run(testReplicatedDropPtnCmd.get().get(0));
// Replicated drop against an older partition must go through.
Exception onfe2=null;
try {
HCatPartition p_del=client.getPartition(dbName,tableName,ptnDesc);
}
catch ( Exception e) {
onfe2=e;
}
assertNotNull(onfe2);
assertTrue(onfe2 instanceof ObjectNotFoundException);
}
Class: org.apache.hive.hcatalog.api.repl.commands.TestNoopCommand InternalCallVerifier EqualityVerifier
// Verifies the basic contract of NoopCommand: it carries its event id, emits
// no statements to run in either the forward or undo direction, and reports
// itself as both retriable and undoable. Finally round-trips the command
// through serialization to confirm it is replayable.
// FIX: this method was declared static; JUnit 4 requires @Test methods to be
// non-static public instance methods and reports an initialization error
// ("Method testCommand() should not be static") instead of running them, so
// the test never actually executed. Declared as an instance method.
@Test public void testCommand(){
int evid=999;
Command testCmd=new NoopCommand(evid);
assertEquals(evid,testCmd.getEventId());
assertEquals(0,testCmd.get().size());
assertEquals(true,testCmd.isRetriable());
assertEquals(true,testCmd.isUndoable());
assertEquals(0,testCmd.getUndo().size());
CommandTestUtils.testCommandSerialization(testCmd);
}
Class: org.apache.hive.hcatalog.api.repl.exim.TestEximReplicationTasks APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Builds a synthetic DROP_DATABASE notification event and checks that
// ReplicationTask.create produces a drop-database replication task that
// retains the originating event verbatim.
@Test public void testDropDb() throws IOException {
Database db=new Database();
db.setName("testdb");
// NOTE(review): the payload is built with buildCreateDatabaseMessage even
// though the event type is HCAT_DROP_DATABASE_EVENT -- presumably the task
// factory dispatches on the event type and does not parse this body; confirm
// against MessageFactory before relying on it.
NotificationEvent event=new NotificationEvent(getEventId(),getTime(),HCatConstants.HCAT_DROP_DATABASE_EVENT,msgFactory.buildCreateDatabaseMessage(db).toString());
event.setDbName(db.getName());
HCatNotificationEvent hev=new HCatNotificationEvent(event);
ReplicationTask rtask=ReplicationTask.create(client,hev);
assertEquals(hev.toString(),rtask.getEvent().toString());
verifyDropDbReplicationTask(rtask);
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Builds a synthetic ADD_PARTITION notification event for a two-key
// partitioned table with two added partitions, and checks the resulting
// replication task against those partitions.
@Test public void testAddPartition() throws IOException {
Table t=new Table();
t.setDbName("testdb");
t.setTableName("testtable");
List pkeys=HCatSchemaUtils.getFieldSchemas(HCatSchemaUtils.getHCatSchema("a:int,b:string").getFields());
t.setPartitionKeys(pkeys);
List addedPtns=new ArrayList();
addedPtns.add(createPtn(t,Arrays.asList("120","abc")));
addedPtns.add(createPtn(t,Arrays.asList("201","xyz")));
NotificationEvent event=new NotificationEvent(getEventId(),getTime(),HCatConstants.HCAT_ADD_PARTITION_EVENT,msgFactory.buildAddPartitionMessage(t,addedPtns.iterator()).toString());
event.setDbName(t.getDbName());
event.setTableName(t.getTableName());
HCatNotificationEvent hev=new HCatNotificationEvent(event);
ReplicationTask rtask=ReplicationTask.create(client,hev);
// The task must retain the originating event verbatim.
assertEquals(hev.toString(),rtask.getEvent().toString());
verifyAddPartitionReplicationTask(rtask,t,addedPtns);
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Builds a synthetic CREATE_DATABASE notification event and checks that
// ReplicationTask.create produces a create-database replication task that
// retains the originating event verbatim.
@Test public void testCreateDb(){
Database db=new Database();
db.setName("testdb");
NotificationEvent event=new NotificationEvent(getEventId(),getTime(),HCatConstants.HCAT_CREATE_DATABASE_EVENT,msgFactory.buildCreateDatabaseMessage(db).toString());
event.setDbName(db.getName());
HCatNotificationEvent hev=new HCatNotificationEvent(event);
ReplicationTask rtask=ReplicationTask.create(client,hev);
assertEquals(hev.toString(),rtask.getEvent().toString());
verifyCreateDbReplicationTask(rtask);
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Builds a synthetic ALTER_PARTITION notification event (before/after both set
// to the same partition) and checks the resulting replication task.
@Test public void testAlterPartition() throws HCatException {
Table t=new Table();
t.setDbName("testdb");
t.setTableName("testtable");
List pkeys=HCatSchemaUtils.getFieldSchemas(HCatSchemaUtils.getHCatSchema("a:int,b:string").getFields());
t.setPartitionKeys(pkeys);
Partition p=createPtn(t,Arrays.asList("102","lmn"));
// Same partition passed as both "before" and "after" images of the alter.
NotificationEvent event=new NotificationEvent(getEventId(),getTime(),HCatConstants.HCAT_ALTER_PARTITION_EVENT,msgFactory.buildAlterPartitionMessage(t,p,p).toString());
event.setDbName(t.getDbName());
event.setTableName(t.getTableName());
HCatNotificationEvent hev=new HCatNotificationEvent(event);
ReplicationTask rtask=ReplicationTask.create(client,hev);
assertEquals(hev.toString(),rtask.getEvent().toString());
verifyAlterPartitionReplicationTask(rtask,t,p);
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Builds a synthetic ALTER_TABLE notification event (before/after both set to
// the same table) and checks the resulting replication task.
@Test public void testAlterTable() throws IOException {
Table t=new Table();
t.setDbName("testdb");
t.setTableName("testtable");
NotificationEvent event=new NotificationEvent(getEventId(),getTime(),HCatConstants.HCAT_ALTER_TABLE_EVENT,msgFactory.buildAlterTableMessage(t,t).toString());
event.setDbName(t.getDbName());
event.setTableName(t.getTableName());
HCatNotificationEvent hev=new HCatNotificationEvent(event);
ReplicationTask rtask=ReplicationTask.create(client,hev);
assertEquals(hev.toString(),rtask.getEvent().toString());
verifyAlterTableReplicationTask(rtask);
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Builds a synthetic CREATE_TABLE notification event and checks the resulting
// replication task retains the originating event verbatim.
@Test public void testCreateTable() throws IOException {
Table t=new Table();
t.setDbName("testdb");
t.setTableName("testtable");
NotificationEvent event=new NotificationEvent(getEventId(),getTime(),HCatConstants.HCAT_CREATE_TABLE_EVENT,msgFactory.buildCreateTableMessage(t).toString());
event.setDbName(t.getDbName());
event.setTableName(t.getTableName());
HCatNotificationEvent hev=new HCatNotificationEvent(event);
ReplicationTask rtask=ReplicationTask.create(client,hev);
assertEquals(hev.toString(),rtask.getEvent().toString());
verifyCreateTableReplicationTask(rtask);
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Builds a synthetic DROP_TABLE notification event and checks the resulting
// replication task retains the originating event verbatim.
@Test public void testDropTable() throws IOException {
Table t=new Table();
t.setDbName("testdb");
t.setTableName("testtable");
NotificationEvent event=new NotificationEvent(getEventId(),getTime(),HCatConstants.HCAT_DROP_TABLE_EVENT,msgFactory.buildDropTableMessage(t).toString());
event.setDbName(t.getDbName());
event.setTableName(t.getTableName());
HCatNotificationEvent hev=new HCatNotificationEvent(event);
ReplicationTask rtask=ReplicationTask.create(client,hev);
assertEquals(hev.toString(),rtask.getEvent().toString());
verifyDropTableReplicationTask(rtask);
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Builds a synthetic DROP_PARTITION notification event for a single dropped
// partition and checks the resulting replication task.
@Test public void testDropPartition() throws HCatException {
Table t=new Table();
t.setDbName("testdb");
t.setTableName("testtable");
List pkeys=HCatSchemaUtils.getFieldSchemas(HCatSchemaUtils.getHCatSchema("a:int,b:string").getFields());
t.setPartitionKeys(pkeys);
Partition p=createPtn(t,Arrays.asList("102","lmn"));
NotificationEvent event=new NotificationEvent(getEventId(),getTime(),HCatConstants.HCAT_DROP_PARTITION_EVENT,msgFactory.buildDropPartitionMessage(t,Collections.singletonList(p).iterator()).toString());
event.setDbName(t.getDbName());
event.setTableName(t.getTableName());
HCatNotificationEvent hev=new HCatNotificationEvent(event);
ReplicationTask rtask=ReplicationTask.create(client,hev);
assertEquals(hev.toString(),rtask.getEvent().toString());
verifyDropPartitionReplicationTask(rtask,t,p);
}
InternalCallVerifier EqualityVerifier
// Spot-checks the debugMapping function: a non-empty string maps to itself
// concatenated with its reverse ("Blah" -> "BlahhalB"), and null/empty inputs
// pass through unchanged.
@Test public void testDebugMapper(){
assertEquals("BlahhalB",debugMapping.apply("Blah"));
assertEquals(null,debugMapping.apply(null));
assertEquals("",debugMapping.apply(""));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Builds a synthetic INSERT notification event (one partition, one data file)
// and checks the resulting replication task.
@Test public void testInsert() throws HCatException {
Table t=new Table();
t.setDbName("testdb");
t.setTableName("testtable");
List pkeys=HCatSchemaUtils.getFieldSchemas(HCatSchemaUtils.getHCatSchema("a:int,b:string").getFields());
t.setPartitionKeys(pkeys);
Partition p=createPtn(t,Arrays.asList("102","lmn"));
List files=Arrays.asList("/tmp/test123");
NotificationEvent event=new NotificationEvent(getEventId(),getTime(),HCatConstants.HCAT_INSERT_EVENT,msgFactory.buildInsertMessage(t.getDbName(),t.getTableName(),getPtnDesc(t,p),files).toString());
event.setDbName(t.getDbName());
event.setTableName(t.getTableName());
HCatNotificationEvent hev=new HCatNotificationEvent(event);
ReplicationTask rtask=ReplicationTask.create(client,hev);
assertEquals(hev.toString(),rtask.getEvent().toString());
verifyInsertReplicationTask(rtask,t,p);
}
Class: org.apache.hive.hcatalog.cli.TestSemanticAnalysis InternalCallVerifier EqualityVerifier
// A plain CREATE TABLE without a STORED AS clause must be accepted by the
// HCatalog driver (response code 0).
@Test public void testStoredAs() throws CommandNeedRetryException {
hcatDriver.run("drop table junit_sem_analysis");
// `query` is a shared instance field used across these tests.
query="create table junit_sem_analysis (a int)";
CommandProcessorResponse response=hcatDriver.run(query);
assertEquals(0,response.getResponseCode());
hcatDriver.run("drop table junit_sem_analysis");
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// CREATE TABLE with explicit INPUTFORMAT/OUTPUTFORMAT plus inputdriver/
// outputdriver clauses must succeed, and the metastore must record the RCFile
// input/output formats.
@Test public void testAddDriverInfo() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
hcatDriver.run("drop table junit_sem_analysis");
query="create table junit_sem_analysis (a int) partitioned by (b string) stored as " + "INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' OUTPUTFORMAT " + "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver' ";
assertEquals(0,hcatDriver.run(query).getResponseCode());
Table tbl=client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,TBL_NAME);
assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat());
assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
hcatDriver.run("drop table junit_sem_analysis");
}
InternalCallVerifier EqualityVerifier
// ALTER TABLE ... CHANGE must accept a column rename, a type change, and a
// reposition (AFTER) -- each returning response code 0.
@Test public void testChangeColumns() throws CommandNeedRetryException {
hcatDriver.run("drop table junit_sem_analysis");
hcatDriver.run("create table junit_sem_analysis (a int, c string) partitioned by (b string) stored as RCFILE");
CommandProcessorResponse response=hcatDriver.run("alter table junit_sem_analysis change a a1 int");
assertEquals(0,response.getResponseCode());
response=hcatDriver.run("alter table junit_sem_analysis change a1 a string");
assertEquals(0,response.getResponseCode());
response=hcatDriver.run("alter table junit_sem_analysis change a a int after c");
assertEquals(0,response.getResponseCode());
hcatDriver.run("drop table junit_sem_analysis");
}
InternalCallVerifier EqualityVerifier
// CREATE TABLE ... LIKE against an existing partitioned RCFILE table must
// succeed through the HCatalog driver.
@Test public void testCTLPass() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
try {
hcatDriver.run("drop table junit_sem_analysis");
}
catch ( Exception e) {
// Best-effort cleanup; the table may not exist on the first run.
LOG.error("Error in drop table.",e);
}
query="create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE";
hcatDriver.run(query);
String likeTbl="like_table";
hcatDriver.run("drop table " + likeTbl);
query="create table like_table like junit_sem_analysis";
CommandProcessorResponse resp=hcatDriver.run(query);
assertEquals(0,resp.getResponseCode());
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// ALTER TABLE ... ADD PARTITION with an explicit location must succeed
// (response code 0, no error message).
@Test public void testAddPartPass() throws IOException, CommandNeedRetryException {
hcatDriver.run("drop table junit_sem_analysis");
hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
CommandProcessorResponse response=hcatDriver.run("alter table junit_sem_analysis add partition (b='2') location '" + TEST_DATA_DIR + "'");
assertEquals(0,response.getResponseCode());
assertNull(response.getErrorMessage());
hcatDriver.run("drop table junit_sem_analysis");
}
InternalCallVerifier EqualityVerifier
// ALTER TABLE ... CLUSTERED BY ... INTO n BUCKETS must be accepted by the
// HCatalog driver.
@Test public void testAlterTblClusteredBy() throws CommandNeedRetryException {
hcatDriver.run("drop table junit_sem_analysis");
hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
CommandProcessorResponse response=hcatDriver.run("alter table junit_sem_analysis clustered by (a) into 7 buckets");
assertEquals(0,response.getResponseCode());
hcatDriver.run("drop table junit_sem_analysis");
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// CREATE TABLE IF NOT EXISTS on an existing table must succeed without error
// and must leave the original table definition (columns, RCFile formats)
// untouched.
@Test public void testCreateTableIfNotExists() throws MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
hcatDriver.run("drop table " + TBL_NAME);
hcatDriver.run("create table " + TBL_NAME + " (a int) stored as RCFILE");
Table tbl=client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,TBL_NAME);
List cols=tbl.getSd().getCols();
assertEquals(1,cols.size());
assertTrue(cols.get(0).equals(new FieldSchema("a","int",null)));
assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat());
assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
// Second create with IF NOT EXISTS: must be a clean no-op.
CommandProcessorResponse resp=hcatDriver.run("create table if not exists junit_sem_analysis (a int) stored as RCFILE");
assertEquals(0,resp.getResponseCode());
assertNull(resp.getErrorMessage());
// Re-read and confirm nothing about the table changed.
tbl=client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,TBL_NAME);
cols=tbl.getSd().getCols();
assertEquals(1,cols.size());
assertTrue(cols.get(0).equals(new FieldSchema("a","int",null)));
assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat());
assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
hcatDriver.run("drop table junit_sem_analysis");
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// ALTER TABLE ... PARTITION ... SET FILEFORMAT must change only the
// partition's storage descriptor (to RCFile) while the table-level format
// stays TEXTFILE.
@Test public void testAlterTblFFpart() throws MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
driver.run("drop table junit_sem_analysis");
driver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as TEXTFILE");
driver.run("alter table junit_sem_analysis add partition (b='2010-10-10')");
hcatDriver.run("alter table junit_sem_analysis partition (b='2010-10-10') set fileformat RCFILE");
Table tbl=client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,TBL_NAME);
// Table-level format is untouched by the partition-level alter.
assertEquals(TextInputFormat.class.getName(),tbl.getSd().getInputFormat());
assertEquals(HiveIgnoreKeyTextOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
List partVals=new ArrayList(1);
partVals.add("2010-10-10");
Partition part=client.getPartition(MetaStoreUtils.DEFAULT_DATABASE_NAME,TBL_NAME,partVals);
// Partition-level format reflects the new RCFile setting.
assertEquals(RCFileInputFormat.class.getName(),part.getSd().getInputFormat());
assertEquals(RCFileOutputFormat.class.getName(),part.getSd().getOutputFormat());
hcatDriver.run("drop table junit_sem_analysis");
}
InternalCallVerifier EqualityVerifier
// CREATE TABLE with a CLUSTERED BY clause must be accepted (response code 0)
// by the HCatalog semantic analyzer.
@Test public void testInvalidateClusteredBy() throws IOException, CommandNeedRetryException {
hcatDriver.run("drop table junit_sem_analysis");
query="create table junit_sem_analysis (a int) partitioned by (b string) clustered by (a) into 10 buckets stored as TEXTFILE";
CommandProcessorResponse response=hcatDriver.run(query);
assertEquals(0,response.getResponseCode());
}
InternalCallVerifier EqualityVerifier
// CREATE TABLE ... STORED AS TEXTFILE must be accepted (response code 0) by
// the HCatalog semantic analyzer.
@Test public void testInvalidateTextFileStoredAs() throws IOException, CommandNeedRetryException {
hcatDriver.run("drop table junit_sem_analysis");
query="create table junit_sem_analysis (a int) partitioned by (b string) stored as TEXTFILE";
CommandProcessorResponse response=hcatDriver.run(query);
assertEquals(0,response.getResponseCode());
}
InternalCallVerifier EqualityVerifier
// Switching to a database that does not exist must fail with the
// DATABASE_NOT_EXISTS error code rather than succeed silently.
@Test public void testUsNonExistentDB() throws CommandNeedRetryException {
CommandProcessorResponse useResponse=hcatDriver.run("use no_such_db");
int actualCode=useResponse.getResponseCode();
assertEquals(ErrorMsg.DATABASE_NOT_EXISTS.getErrorCode(),actualCode);
}
InternalCallVerifier EqualityVerifier
// Partition columns of a non-string type must be rejected: expect error code
// 40000 with a specific SemanticException message.
@Test public void testInvalidateNonStringPartition() throws IOException, CommandNeedRetryException {
hcatDriver.run("drop table junit_sem_analysis");
query="create table junit_sem_analysis (a int) partitioned by (b int) stored as RCFILE";
CommandProcessorResponse response=hcatDriver.run(query);
assertEquals(40000,response.getResponseCode());
assertEquals("FAILED: SemanticException Operation not supported. HCatalog only supports partition columns of type string. For column: b Found type: int",response.getErrorMessage());
}
InternalCallVerifier EqualityVerifier
// ALTER TABLE ... TOUCH must succeed at both the table level and the
// partition level (response code 0 each time).
@Test public void testAlterTblTouch() throws CommandNeedRetryException {
hcatDriver.run("drop table junit_sem_analysis");
hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
CommandProcessorResponse response=hcatDriver.run("alter table junit_sem_analysis touch");
assertEquals(0,response.getResponseCode());
// FIX: the partition-level touch's response was previously discarded, so the
// following assertion re-checked the stale table-level response and could
// never detect a partition-touch failure. Capture the new response instead.
response=hcatDriver.run("alter table junit_sem_analysis touch partition (b='12')");
assertEquals(0,response.getResponseCode());
hcatDriver.run("drop table junit_sem_analysis");
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// Partition column names must be normalized to lower case at table creation:
// a table declared with partition key "B" is stored with key "b".
@Test public void testCreateTblWithLowerCasePartNames() throws CommandNeedRetryException, MetaException, TException, NoSuchObjectException {
driver.run("drop table junit_sem_analysis");
CommandProcessorResponse resp=driver.run("create table junit_sem_analysis (a int) partitioned by (B string) stored as TEXTFILE");
// FIX: JUnit's assertEquals takes (expected, actual); the arguments were
// previously reversed, which yields a misleading failure message.
assertEquals(0,resp.getResponseCode());
assertEquals(null,resp.getErrorMessage());
Table tbl=client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,TBL_NAME);
assertEquals("Partition key name case problem","b",tbl.getPartitionKeys().get(0).getName());
driver.run("drop table junit_sem_analysis");
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// CREATE TABLE AS SELECT is not supported by HCatalog: expect error code
// 40000 and the specific SemanticException message.
@Test public void testCTAS() throws CommandNeedRetryException {
hcatDriver.run("drop table junit_sem_analysis");
query="create table junit_sem_analysis (a int) as select * from tbl2";
CommandProcessorResponse response=hcatDriver.run(query);
assertEquals(40000,response.getResponseCode());
assertTrue(response.getErrorMessage().contains("FAILED: SemanticException Operation not supported. Create table as Select is not a valid operation."));
hcatDriver.run("drop table junit_sem_analysis");
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
// ALTER TABLE ... SET FILEFORMAT (with driver clauses) on a table already
// stored as RCFile must leave the RCFile input/output formats in place.
@Test public void testAlterTableSetFF() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
hcatDriver.run("drop table junit_sem_analysis");
hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
Table tbl=client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,TBL_NAME);
assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat());
assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
hcatDriver.run("alter table junit_sem_analysis set fileformat INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' OUTPUTFORMAT " + "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver'");
hcatDriver.run("desc extended junit_sem_analysis");
// Re-read from the metastore and confirm the formats are unchanged.
tbl=client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,TBL_NAME);
assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat());
assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
hcatDriver.run("drop table junit_sem_analysis");
}
InternalCallVerifier EqualityVerifier
// CREATE TABLE ... LIKE through the HCatalog driver, where the source table
// was created by the plain Hive driver.
// NOTE(review): despite the "Fail" name this asserts success (code 0) --
// presumably the name is historical and the behavior was later allowed;
// confirm against the test class's history.
@Test public void testCTLFail() throws IOException, CommandNeedRetryException {
driver.run("drop table junit_sem_analysis");
driver.run("drop table like_table");
query="create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE";
driver.run(query);
query="create table like_table like junit_sem_analysis";
CommandProcessorResponse response=hcatDriver.run(query);
assertEquals(0,response.getResponseCode());
}
InternalCallVerifier EqualityVerifier
// ADD PARTITION through the HCatalog driver on a table created by the plain
// Hive driver, with a relative location.
// NOTE(review): despite the "Fail" name this asserts success (code 0) --
// presumably historical naming; confirm against the test class's history.
@Test public void testAddPartFail() throws CommandNeedRetryException {
driver.run("drop table junit_sem_analysis");
driver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
CommandProcessorResponse response=hcatDriver.run("alter table junit_sem_analysis add partition (b='2') location 'README.txt'");
assertEquals(0,response.getResponseCode());
driver.run("drop table junit_sem_analysis");
}
BranchVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Exercises database DDL through the HCatalog driver: create, create-if-not-
// exists on an existing and a new database, and drop -- verifying metastore
// visibility after each step.
@Test public void testDatabaseOperations() throws MetaException, CommandNeedRetryException {
List dbs=client.getAllDatabases();
String testDb1="testdatabaseoperatons1";
String testDb2="testdatabaseoperatons2";
// Pre-clean: drop leftovers from earlier runs (names are stored lowercased).
if (dbs.contains(testDb1.toLowerCase())) {
assertEquals(0,hcatDriver.run("drop database " + testDb1).getResponseCode());
}
if (dbs.contains(testDb2.toLowerCase())) {
assertEquals(0,hcatDriver.run("drop database " + testDb2).getResponseCode());
}
assertEquals(0,hcatDriver.run("create database " + testDb1).getResponseCode());
assertTrue(client.getAllDatabases().contains(testDb1));
// IF NOT EXISTS on an existing database must be a clean no-op.
assertEquals(0,hcatDriver.run("create database if not exists " + testDb1).getResponseCode());
assertTrue(client.getAllDatabases().contains(testDb1));
assertEquals(0,hcatDriver.run("create database if not exists " + testDb2).getResponseCode());
assertTrue(client.getAllDatabases().contains(testDb2));
assertEquals(0,hcatDriver.run("drop database " + testDb1).getResponseCode());
assertEquals(0,hcatDriver.run("drop database " + testDb2).getResponseCode());
assertFalse(client.getAllDatabases().contains(testDb1));
assertFalse(client.getAllDatabases().contains(testDb2));
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// DESCRIBE DATABASE through the HCatalog driver must succeed and mention the
// database name in its first result row.
@Test public void testDescDB() throws CommandNeedRetryException, IOException {
hcatDriver.run("drop database mydb cascade");
assertEquals(0,hcatDriver.run("create database mydb").getResponseCode());
CommandProcessorResponse resp=hcatDriver.run("describe database mydb");
assertEquals(0,resp.getResponseCode());
ArrayList result=new ArrayList();
hcatDriver.getResults(result);
assertTrue(result.get(0).contains("mydb"));
hcatDriver.run("drop database mydb cascade");
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Verifies ALTER TABLE ... REPLACE COLUMNS and ADD COLUMNS through the HCat
 * driver, then checks the resulting column list directly against the metastore.
 */
@Test public void testAddReplaceCols() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
  hcatDriver.run("drop table junit_sem_analysis");
  hcatDriver.run("create table junit_sem_analysis (a int, c string) partitioned by (b string) stored as RCFILE");
  // REPLACE COLUMNS swaps out the entire non-partition column list.
  CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis replace columns (a1 tinyint)");
  assertEquals(0, response.getResponseCode());
  // ADD COLUMNS appends to the replaced list.
  response = hcatDriver.run("alter table junit_sem_analysis add columns (d tinyint)");
  assertEquals(0, response.getResponseCode());
  assertNull(response.getErrorMessage());
  response = hcatDriver.run("describe extended junit_sem_analysis");
  assertEquals(0, response.getResponseCode());
  Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
  List<FieldSchema> cols = tbl.getSd().getCols();
  assertEquals(2, cols.size());
  // assertEquals reports both values on failure, unlike assertTrue(a.equals(b)).
  assertEquals(new FieldSchema("a1", "tinyint", null), cols.get(0));
  assertEquals(new FieldSchema("d", "tinyint", null), cols.get(1));
  hcatDriver.run("drop table junit_sem_analysis");
}
InternalCallVerifier EqualityVerifier
/**
 * Runs CREATE TABLE ... STORED AS SEQUENCEFILE through the HCat driver and
 * asserts it succeeds.
 *
 * NOTE(review): the method name suggests SEQUENCEFILE was once expected to be
 * rejected, yet the assertion accepts response code 0 — confirm the intended
 * behavior and consider renaming the test to match.
 */
@Test public void testInvalidateSeqFileStoredAs() throws IOException, CommandNeedRetryException {
hcatDriver.run("drop table junit_sem_analysis");
// 'query' is a field of the enclosing test class, reused across test methods.
query="create table junit_sem_analysis (a int) partitioned by (b string) stored as SEQUENCEFILE";
CommandProcessorResponse response=hcatDriver.run(query);
assertEquals(0,response.getResponseCode());
}
Class: org.apache.hive.hcatalog.common.TestHCatUtil APIUtilityVerifier IterativeVerifier BranchVerifier EqualityVerifier
/**
 * Walks every octal permission triple 000-777, checks that the string form of
 * FsPermission maps one-to-one onto its numeric code (no two codes share a
 * permission string), and round-trips every permission string through
 * assertFsPermissionTransformationIsGood.
 */
@Test public void testFsPermissionOperation(){
  // perms string -> the single numeric code it must correspond to.
  Map<String, Integer> permsCode = new HashMap<String, Integer>();
  for (int i = 0; i < 8; i++) {
    for (int j = 0; j < 8; j++) {
      for (int k = 0; k < 8; k++) {
        // Leading "0" makes Short.decode parse the digits as octal.
        String octal = "0" + i + j + k;
        Integer code = (((i * 8) + j) * 8) + k;
        String perms = new FsPermission(Short.decode(octal)).toString();
        if (permsCode.containsKey(perms)) {
          Assert.assertEquals("permissions(" + perms + ") mapped to multiple codes", code, permsCode.get(perms));
        }
        permsCode.put(perms, code);
        assertFsPermissionTransformationIsGood(perms);
      }
    }
  }
}
EqualityVerifier
/**
 * Hive models a table in two ways:
 *
 * org.apache.hadoop.hive.metastore.api.Table - exactly what is stored in the metastore
 * org.apache.hadoop.hive.ql.metadata.Table - adds business logic over api.Table
 *
 * This test builds a table whose columns come entirely from the SerDe (a
 * ThriftDeserializer over the IntString thrift type — note the StorageDescriptor
 * lists no columns) and checks that those SerDe-reported fields appear in the
 * schema returned by HCatUtil.getTableSchemaWithPtnCols.
 */
@Test public void testGetTableSchemaWithPtnColsSerDeReportedFields() throws IOException {
  Map serdeParams = Maps.newHashMap();
  serdeParams.put(serdeConstants.SERIALIZATION_CLASS, "org.apache.hadoop.hive.serde2.thrift.test.IntString");
  serdeParams.put(serdeConstants.SERIALIZATION_FORMAT, "org.apache.thrift.protocol.TBinaryProtocol");
  SerDeInfo thriftSerde = new SerDeInfo(null, "org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer", serdeParams);
  // No explicit columns here: the SerDe is the sole source of the field list.
  StorageDescriptor sd = new StorageDescriptor(new ArrayList(), "location",
      "org.apache.hadoop.mapred.TextInputFormat", "org.apache.hadoop.mapred.TextOutputFormat",
      false, -1, thriftSerde, new ArrayList(), new ArrayList(), new HashMap());
  org.apache.hadoop.hive.metastore.api.Table apiTable =
      new org.apache.hadoop.hive.metastore.api.Table("test_tblname", "test_dbname", "test_owner",
          0, 0, 0, sd, new ArrayList(), new HashMap(), "viewOriginalText", "viewExpandedText",
          TableType.EXTERNAL_TABLE.name());
  Table qlTable = new Table(apiTable);
  List expectedFields = Lists.newArrayList(
      new HCatFieldSchema("myint", HCatFieldSchema.Type.INT, null),
      new HCatFieldSchema("mystring", HCatFieldSchema.Type.STRING, null),
      new HCatFieldSchema("underscore_int", HCatFieldSchema.Type.INT, null));
  Assert.assertEquals(new HCatSchema(expectedFields), HCatUtil.getTableSchemaWithPtnCols(qlTable));
}
InternalCallVerifier EqualityVerifier
/**
 * getTableSchemaWithPtnCols must return the data columns of an api.Table, and
 * must append partition columns to the end of the schema once partition keys
 * are set on the underlying thrift table.
 */
@Test public void testGetTableSchemaWithPtnColsApi() throws IOException {
  StorageDescriptor sd = new StorageDescriptor(
      Lists.newArrayList(new FieldSchema("username", serdeConstants.STRING_TYPE_NAME, null)),
      "location", "org.apache.hadoop.mapred.TextInputFormat",
      "org.apache.hadoop.mapred.TextOutputFormat", false, -1, new SerDeInfo(),
      new ArrayList(), new ArrayList(), new HashMap());
  org.apache.hadoop.hive.metastore.api.Table apiTable =
      new org.apache.hadoop.hive.metastore.api.Table("test_tblname", "test_dbname", "test_owner",
          0, 0, 0, sd, new ArrayList(), new HashMap(), "viewOriginalText", "viewExpandedText",
          TableType.EXTERNAL_TABLE.name());
  Table qlTable = new Table(apiTable);
  // With no partition keys the schema is just the single data column.
  List expectedSchema = Lists.newArrayList(new HCatFieldSchema("username", HCatFieldSchema.Type.STRING, null));
  Assert.assertEquals(new HCatSchema(expectedSchema), HCatUtil.getTableSchemaWithPtnCols(qlTable));
  // Adding a partition key surfaces it at the end of the schema.
  qlTable.getTTable().setPartitionKeys(
      Lists.newArrayList(new FieldSchema("dt", serdeConstants.STRING_TYPE_NAME, null)));
  expectedSchema.add(new HCatFieldSchema("dt", HCatFieldSchema.Type.STRING, null));
  Assert.assertEquals(new HCatSchema(expectedSchema), HCatUtil.getTableSchemaWithPtnCols(qlTable));
}
Class: org.apache.hive.hcatalog.common.TestHiveClientCache InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Verifies the HiveClientCache hands back the same cached metastore client for
 * an equivalent conf, even after a close() and an unrelated conf change.
 *
 * NOTE(review): this assumes close() on a cacheable client only releases a
 * reference (the instance remains usable for getUsers()) and that
 * DYNAMICPARTITIONMAXPARTS does not participate in the cache key — confirm
 * against HiveClientCache before relying on either.
 */
@Test public void testCacheHit() throws IOException, MetaException, LoginException {
HiveClientCache cache=new HiveClientCache(1000);
HiveClientCache.ICacheableMetaStoreClient client=cache.get(hiveConf);
assertNotNull(client);
client.close();
// Change a conf var that presumably does not affect the cache key.
hiveConf.setIntVar(HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTS,10);
HiveClientCache.ICacheableMetaStoreClient client2=cache.get(hiveConf);
assertNotNull(client2);
// Both handles should reflect the same underlying client state.
assertEquals(client.getUsers(),client2.getUsers());
client2.close();
}
Class: org.apache.hive.hcatalog.data.TestLazyHCatRecord InternalCallVerifier EqualityVerifier
/** getAll() on a lazy record must expose every field value as a plain list. */
@Test public void testGetAll() throws Exception {
  HCatRecord lazyRecord = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
  List values = lazyRecord.getAll();
  Assert.assertEquals(INT_CONST, ((Integer) values.get(0)).intValue());
  Assert.assertEquals(LONG_CONST, ((Long) values.get(1)).longValue());
  Assert.assertEquals(DOUBLE_CONST, ((Double) values.get(2)).doubleValue(), 0);
  Assert.assertEquals(STRING_CONST, (String) values.get(3));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Fields of a lazy record must be retrievable by name through an HCatSchema
 * derived from the record's type info.
 */
@Test public void testGetWithName() throws Exception {
  TypeInfo typeInfo = getTypeInfo();
  HCatRecord lazyRecord = new LazyHCatRecord(getHCatRecord(), getObjectInspector(typeInfo));
  HCatSchema schema = HCatSchemaUtils.getHCatSchema(typeInfo).get(0).getStructSubSchema();
  Assert.assertEquals(INT_CONST, ((Integer) lazyRecord.get("an_int", schema)).intValue());
  Assert.assertEquals(LONG_CONST, ((Long) lazyRecord.get("a_long", schema)).longValue());
  Assert.assertEquals(DOUBLE_CONST, ((Double) lazyRecord.get("a_double", schema)).doubleValue(), 0);
  Assert.assertEquals(STRING_CONST, (String) lazyRecord.get("a_string", schema));
}
EqualityVerifier
/** A lazy record over the four-field test struct must report size 4. */
@Test public void testSize() throws Exception {
  HCatRecord lazyRecord = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
  Assert.assertEquals(4, lazyRecord.size());
}
InternalCallVerifier EqualityVerifier
/**
 * getWritable() must eagerly materialize the lazy record into a
 * DefaultHCatRecord carrying the same field values.
 */
@Test public void testGetWritable() throws Exception {
  HCatRecord materialized = new LazyHCatRecord(getHCatRecord(), getObjectInspector()).getWritable();
  Assert.assertEquals(INT_CONST, ((Integer) materialized.get(0)).intValue());
  Assert.assertEquals(LONG_CONST, ((Long) materialized.get(1)).longValue());
  Assert.assertEquals(DOUBLE_CONST, ((Double) materialized.get(2)).doubleValue(), 0);
  Assert.assertEquals(STRING_CONST, (String) materialized.get(3));
  // The concrete type must be the eager default record, not another lazy one.
  Assert.assertEquals("org.apache.hive.hcatalog.data.DefaultHCatRecord", materialized.getClass().getName());
}
InternalCallVerifier EqualityVerifier
/** Fields of a lazy record must be retrievable positionally via get(i). */
@Test public void testGet() throws Exception {
  HCatRecord lazyRecord = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
  Assert.assertEquals(INT_CONST, ((Integer) lazyRecord.get(0)).intValue());
  Assert.assertEquals(LONG_CONST, ((Long) lazyRecord.get(1)).longValue());
  Assert.assertEquals(DOUBLE_CONST, ((Double) lazyRecord.get(2)).doubleValue(), 0);
  Assert.assertEquals(STRING_CONST, (String) lazyRecord.get(3));
}
Class: org.apache.hive.hcatalog.hbase.TestPigHBaseStorageHandler APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Loads an HBase-backed HCat table through Pig's HCatLoader, applies a filter
 * (key < 5) and a projection (key, testqualifier2), and verifies the four
 * expected rows come back in order.
 */
@Test public void testPigFilterProjection() throws Exception {
  Initialize();
  String tableName = newTableName("MyTable");
  String databaseName = newTableName("MyDatabase");
  String hbaseTableName = (databaseName + "." + tableName).toLowerCase();
  String db_dir = HCatUtil.makePathASafeFileName(getTestDir() + "/hbasedb");
  String dbQuery = "CREATE DATABASE IF NOT EXISTS " + databaseName + " LOCATION '" + db_dir + "'";
  String deleteQuery = "DROP TABLE " + databaseName + "." + tableName;
  String tableQuery = "CREATE TABLE " + databaseName + "." + tableName
      + "(key int, testqualifier1 string, testqualifier2 string) STORED BY "
      + "'org.apache.hadoop.hive.hbase.HBaseStorageHandler'"
      + " WITH SERDEPROPERTIES ('hbase.columns.mapping'=':key,testFamily:testQualifier1,testFamily:testQualifier2')"
      + " TBLPROPERTIES ('hbase.table.default.storage.type'='binary')";
  CommandProcessorResponse responseOne = driver.run(deleteQuery);
  assertEquals(0, responseOne.getResponseCode());
  CommandProcessorResponse responseTwo = driver.run(dbQuery);
  assertEquals(0, responseTwo.getResponseCode());
  CommandProcessorResponse responseThree = driver.run(tableQuery);
  // Fix: the create-table response code was previously never checked.
  assertEquals(0, responseThree.getResponseCode());
  HBaseAdmin hAdmin = new HBaseAdmin(getHbaseConf());
  boolean doesTableExist = hAdmin.tableExists(hbaseTableName);
  assertTrue(doesTableExist);
  populateHBaseTable(hbaseTableName);
  // (Removed an unused HTable/Scan/ResultScanner setup that was never read and
  // leaked an open scanner.)
  PigServer server = new PigServer(ExecType.LOCAL, hcatConf.getAllProperties());
  server.registerQuery("A = load '" + databaseName + "." + tableName + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
  server.registerQuery("B = filter A by key < 5;");
  server.registerQuery("C = foreach B generate key,testqualifier2;");
  Iterator<Tuple> itr = server.openIterator("C");
  int index = 1;
  while (itr.hasNext()) {
    Tuple t = itr.next();
    assertTrue(t.size() == 2);
    assertTrue(t.get(0).getClass() == Integer.class);
    assertEquals(index, t.get(0));
    assertTrue(t.get(1).getClass() == String.class);
    assertEquals("textB-" + index, t.get(1));
    index++;
  }
  // Fix: expected value goes first in assertEquals; keys 1..4 pass the filter.
  assertEquals(4, index - 1);
}
APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * End-to-end Pig -> HCatStorer -> HBase population test: loads rows from a
 * generated text file, keeps keys in (2, 8), stores them through HCatStorer,
 * then verifies the data three ways — HCatLoader's reported schema, a raw
 * HBase scan of the stored cells, and a Hive SELECT.
 */
@Test public void testPigPopulation() throws Exception {
  Initialize();
  String tableName = newTableName("MyTable");
  String databaseName = newTableName("MyDatabase");
  String hbaseTableName = (databaseName + "." + tableName).toLowerCase();
  String db_dir = HCatUtil.makePathASafeFileName(getTestDir() + "/hbasedb");
  String POPTXT_FILE_NAME = db_dir + "testfile.txt";
  float f = -100.1f;
  String dbQuery = "CREATE DATABASE IF NOT EXISTS " + databaseName + " LOCATION '" + db_dir + "'";
  String deleteQuery = "DROP TABLE " + databaseName + "." + tableName;
  String tableQuery = "CREATE TABLE " + databaseName + "." + tableName
      + "(key int, testqualifier1 float, testqualifier2 string) STORED BY "
      + "'org.apache.hadoop.hive.hbase.HBaseStorageHandler'"
      + " WITH SERDEPROPERTIES ('hbase.columns.mapping'=':key,testFamily:testQualifier1,testFamily:testQualifier2')"
      + " TBLPROPERTIES ('hbase.table.default.storage.type'='binary')";
  String selectQuery = "SELECT * from " + databaseName.toLowerCase() + "." + tableName.toLowerCase();
  CommandProcessorResponse responseOne = driver.run(deleteQuery);
  assertEquals(0, responseOne.getResponseCode());
  CommandProcessorResponse responseTwo = driver.run(dbQuery);
  assertEquals(0, responseTwo.getResponseCode());
  CommandProcessorResponse responseThree = driver.run(tableQuery);
  // Fix: the create-table response code was previously never checked.
  assertEquals(0, responseThree.getResponseCode());
  HBaseAdmin hAdmin = new HBaseAdmin(getHbaseConf());
  boolean doesTableExist = hAdmin.tableExists(hbaseTableName);
  assertTrue(doesTableExist);
  createTestDataFile(POPTXT_FILE_NAME);
  PigServer server = new PigServer(ExecType.LOCAL, hcatConf.getAllProperties());
  server.registerQuery("A = load '" + POPTXT_FILE_NAME + "' using PigStorage() as (key:int, testqualifier1:float, testqualifier2:chararray);");
  server.registerQuery("B = filter A by (key > 2) AND (key < 8) ;");
  server.registerQuery("store B into '" + databaseName.toLowerCase() + "." + tableName.toLowerCase() + "' using org.apache.hive.hcatalog.pig.HCatStorer();");
  server.registerQuery("C = load '" + databaseName.toLowerCase() + "." + tableName.toLowerCase() + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
  // The schema reported by HCatLoader must match the Hive table definition.
  Schema dumpedBSchema = server.dumpSchema("C");
  List fields = dumpedBSchema.getFields();
  assertEquals(3, fields.size());
  assertEquals(DataType.INTEGER, fields.get(0).type);
  assertEquals("key", fields.get(0).alias.toLowerCase());
  assertEquals(DataType.FLOAT, fields.get(1).type);
  assertEquals("testQualifier1".toLowerCase(), fields.get(1).alias.toLowerCase());
  assertEquals(DataType.CHARARRAY, fields.get(2).type);
  assertEquals("testQualifier2".toLowerCase(), fields.get(2).alias.toLowerCase());
  // Verify the stored cells directly via an HBase scan; keys 3..7 are expected.
  Configuration conf = new Configuration(getHbaseConf());
  HTable table = new HTable(conf, hbaseTableName);
  Scan scan = new Scan();
  scan.addFamily(Bytes.toBytes("testFamily"));
  byte[] familyNameBytes = Bytes.toBytes("testFamily");
  ResultScanner scanner = table.getScanner(scan);
  int index = 3;
  int count = 0;
  try {
    for (Result result : scanner) {
      assertEquals(index, Bytes.toInt(result.getRow()));
      assertTrue(result.containsColumn(familyNameBytes, Bytes.toBytes("testQualifier1")));
      assertEquals(index + f, Bytes.toFloat(result.getValue(familyNameBytes, Bytes.toBytes("testQualifier1"))), 0);
      assertTrue(result.containsColumn(familyNameBytes, Bytes.toBytes("testQualifier2")));
      assertEquals("textB-" + index, Bytes.toString(result.getValue(familyNameBytes, Bytes.toBytes("testQualifier2"))));
      index++;
      count++;
    }
  } finally {
    // Fix: the scanner was previously leaked.
    scanner.close();
  }
  // Fix: expected value goes first in assertEquals.
  assertEquals(5, count);
  // Cross-check the same five rows through a Hive SELECT.
  driver.run(selectQuery);
  ArrayList<String> result = new ArrayList<String>();
  driver.getResults(result);
  assertEquals(5, result.size());
  Iterator<String> itr = result.iterator();
  for (int i = 3; i <= 7; i++) {
    String[] tokens = itr.next().split("\\s+");
    assertEquals(i, Integer.parseInt(tokens[0]));
    assertEquals(i + f, Float.parseFloat(tokens[1]), 0);
    assertEquals("textB-" + i, tokens[2]);
  }
  CommandProcessorResponse responseFour = driver.run(deleteQuery);
  assertEquals(0, responseFour.getResponseCode());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies HCatLoader reports the expected Pig schema (field count, types, and
 * aliases) for an HBase-backed Hive table created with an explicit
 * hbase.table.name.
 */
@Test public void testPigHBaseSchema() throws Exception {
  Initialize();
  String tableName = newTableName("MyTable");
  String databaseName = newTableName("MyDatabase");
  String hbaseTableName = "testTable";
  String db_dir = HCatUtil.makePathASafeFileName(getTestDir() + "/hbasedb");
  String dbQuery = "CREATE DATABASE IF NOT EXISTS " + databaseName + " LOCATION '" + db_dir + "'";
  String deleteQuery = "DROP TABLE " + databaseName + "." + tableName;
  String tableQuery = "CREATE TABLE " + databaseName + "." + tableName
      + "(key float, testqualifier1 string, testqualifier2 int) STORED BY "
      + "'org.apache.hadoop.hive.hbase.HBaseStorageHandler'"
      + " WITH SERDEPROPERTIES ('hbase.columns.mapping'=':key,testFamily:testQualifier1,testFamily:testQualifier2')"
      + " TBLPROPERTIES ('hbase.table.name'='" + hbaseTableName + "')";
  CommandProcessorResponse responseOne = driver.run(deleteQuery);
  assertEquals(0, responseOne.getResponseCode());
  CommandProcessorResponse responseTwo = driver.run(dbQuery);
  assertEquals(0, responseTwo.getResponseCode());
  CommandProcessorResponse responseThree = driver.run(tableQuery);
  // Fix: the create-table response code was previously never checked.
  assertEquals(0, responseThree.getResponseCode());
  HBaseAdmin hAdmin = new HBaseAdmin(getHbaseConf());
  boolean doesTableExist = hAdmin.tableExists(hbaseTableName);
  assertTrue(doesTableExist);
  PigServer server = new PigServer(ExecType.LOCAL, hcatConf.getAllProperties());
  server.registerQuery("A = load '" + databaseName + "." + tableName + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
  Schema dumpedASchema = server.dumpSchema("A");
  List fields = dumpedASchema.getFields();
  assertEquals(3, fields.size());
  assertEquals(DataType.FLOAT, fields.get(0).type);
  assertEquals("key", fields.get(0).alias.toLowerCase());
  assertEquals(DataType.CHARARRAY, fields.get(1).type);
  assertEquals("testQualifier1".toLowerCase(), fields.get(1).alias.toLowerCase());
  assertEquals(DataType.INTEGER, fields.get(2).type);
  assertEquals("testQualifier2".toLowerCase(), fields.get(2).alias.toLowerCase());
}
Class: org.apache.hive.hcatalog.listener.TestDbNotificationListener InternalCallVerifier EqualityVerifier
/**
 * Temporary tables are session-local, so creating and inserting into one must
 * generate no metastore notification events at all.
 */
@Test public void sqlTempTable() throws Exception {
  LOG.info("XXX Starting temp table");
  driver.run("create temporary table tmp1 (c int)");
  driver.run("insert into table tmp1 values (1)");
  NotificationEventResponse response = msClient.getNextNotification(firstEventId, 0, null);
  assertEquals(0, response.getEventsSize());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Drops a partition via the metastore client and verifies a DROP_PARTITION
 * notification event is recorded with the expected id, event type, db/table
 * names, and serialized JSON payload.
 */
@Test public void dropPartition() throws Exception {
List cols=new ArrayList();
cols.add(new FieldSchema("col1","int","nocomment"));
List partCols=new ArrayList();
partCols.add(new FieldSchema("ds","string",""));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,emptyParameters);
Table table=new Table("dropPartTable","default","me",startTime,startTime,0,sd,partCols,emptyParameters,null,null,null);
msClient.createTable(table);
Partition partition=new Partition(Arrays.asList("today"),"default","dropPartTable",startTime,startTime,sd,emptyParameters);
msClient.add_partition(partition);
msClient.dropPartition("default","dropparttable",Arrays.asList("today"),false);
// Three events expected: create table, add partition, drop partition.
NotificationEventResponse rsp=msClient.getNextNotification(firstEventId,0,null);
assertEquals(3,rsp.getEventsSize());
NotificationEvent event=rsp.getEvents().get(2);
assertEquals(firstEventId + 3,event.getEventId());
assertTrue(event.getEventTime() >= startTime);
assertEquals(HCatConstants.HCAT_DROP_PARTITION_EVENT,event.getEventType());
// The event carries the table name in lower case.
assertEquals("default",event.getDbName());
assertEquals("dropparttable",event.getTableName());
assertTrue(event.getMessage().matches("\\{\"eventType\":\"DROP_PARTITION\",\"server\":\"\"," + "\"servicePrincipal\":\"\",\"db\":\"default\",\"table\":" + "\"dropparttable\",\"timestamp\":[0-9]+,\"partitions\":\\[\\{\"ds\":\"today\"}]}"));
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Adds a partition via the metastore client and verifies the second recorded
 * notification event is ADD_PARTITION with the expected id, event type,
 * db/table names, and serialized JSON payload.
 */
@Test public void addPartition() throws Exception {
List cols=new ArrayList();
cols.add(new FieldSchema("col1","int","nocomment"));
List partCols=new ArrayList();
partCols.add(new FieldSchema("ds","string",""));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,emptyParameters);
Table table=new Table("addPartTable","default","me",startTime,startTime,0,sd,partCols,emptyParameters,null,null,null);
msClient.createTable(table);
Partition partition=new Partition(Arrays.asList("today"),"default","addPartTable",startTime,startTime,sd,emptyParameters);
msClient.add_partition(partition);
// Two events expected: create table, then add partition.
NotificationEventResponse rsp=msClient.getNextNotification(firstEventId,0,null);
assertEquals(2,rsp.getEventsSize());
NotificationEvent event=rsp.getEvents().get(1);
assertEquals(firstEventId + 2,event.getEventId());
assertTrue(event.getEventTime() >= startTime);
assertEquals(HCatConstants.HCAT_ADD_PARTITION_EVENT,event.getEventType());
// The event carries the table name in lower case.
assertEquals("default",event.getDbName());
assertEquals("addparttable",event.getTableName());
assertTrue(event.getMessage().matches("\\{\"eventType\":\"ADD_PARTITION\",\"server\":\"\"," + "\"servicePrincipal\":\"\",\"db\":\"default\",\"table\":" + "\"addparttable\",\"timestamp\":[0-9]+,\"partitions\":\\[\\{\"ds\":\"today\"}]}"));
}
InternalCallVerifier EqualityVerifier
/**
 * With maxEvents set to 2, only the first two of three CREATE_DATABASE events
 * are returned, in id order.
 */
@Test public void getOnlyMaxEvents() throws Exception {
  msClient.createDatabase(new Database("db1", "no description", "file:/tmp", emptyParameters));
  msClient.createDatabase(new Database("db2", "no description", "file:/tmp", emptyParameters));
  msClient.createDatabase(new Database("db3", "no description", "file:/tmp", emptyParameters));
  NotificationEventResponse response = msClient.getNextNotification(firstEventId, 2, null);
  assertEquals(2, response.getEventsSize());
  assertEquals(firstEventId + 1, response.getEvents().get(0).getEventId());
  assertEquals(firstEventId + 2, response.getEvents().get(1).getEventId());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * CREATE TABLE AS SELECT over a freshly created source produces six events;
 * this spot-checks the CREATE_TABLE for the source (index 0), the INSERT into
 * the source (index 2), and the CREATE_TABLE for the CTAS target (index 4).
 */
@Test public void sqlCTAS() throws Exception {
driver.run("create table ctas_source (c int)");
driver.run("insert into table ctas_source values (1)");
driver.run("create table ctas_target as select c from ctas_source");
NotificationEventResponse rsp=msClient.getNextNotification(firstEventId,0,null);
assertEquals(6,rsp.getEventsSize());
NotificationEvent event=rsp.getEvents().get(0);
assertEquals(firstEventId + 1,event.getEventId());
assertEquals(HCatConstants.HCAT_CREATE_TABLE_EVENT,event.getEventType());
event=rsp.getEvents().get(2);
assertEquals(firstEventId + 3,event.getEventId());
assertEquals(HCatConstants.HCAT_INSERT_EVENT,event.getEventType());
// The insert message should list the written files (pfile scheme locally).
assertTrue(event.getMessage().matches(".*\"files\":\\[\"pfile.*"));
event=rsp.getEvents().get(4);
assertEquals(firstEventId + 5,event.getEventId());
assertEquals(HCatConstants.HCAT_CREATE_TABLE_EVENT,event.getEventType());
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Creating a database must emit exactly one CREATE_DATABASE notification with
 * the expected id, timing, db name, JSON payload, and no table name.
 */
@Test public void createDatabase() throws Exception {
  Database newDb = new Database("mydb", "no description", "file:/tmp", emptyParameters);
  msClient.createDatabase(newDb);
  NotificationEventResponse response = msClient.getNextNotification(firstEventId, 0, null);
  assertEquals(1, response.getEventsSize());
  NotificationEvent evt = response.getEvents().get(0);
  assertEquals(firstEventId + 1, evt.getEventId());
  assertTrue(evt.getEventTime() >= startTime);
  assertEquals(HCatConstants.HCAT_CREATE_DATABASE_EVENT, evt.getEventType());
  assertEquals("mydb", evt.getDbName());
  // Database-level events carry no table name.
  assertNull(evt.getTableName());
  assertTrue(evt.getMessage().matches("\\{\"eventType\":\"CREATE_DATABASE\",\"server\":\"\"," + "\"servicePrincipal\":\"\",\"db\":\"mydb\",\"timestamp\":[0-9]+}"));
}
InternalCallVerifier EqualityVerifier
/**
 * A NotificationFilter restricts the returned events: of three events (two
 * creates and one drop), only the DROP_DATABASE event passes the filter.
 */
@Test public void filter() throws Exception {
  msClient.createDatabase(new Database("f1", "no description", "file:/tmp", emptyParameters));
  msClient.createDatabase(new Database("f2", "no description", "file:/tmp", emptyParameters));
  msClient.dropDatabase("f2");
  // Accept only DROP_DATABASE events.
  IMetaStoreClient.NotificationFilter dropOnly = new IMetaStoreClient.NotificationFilter() {
    @Override public boolean accept(NotificationEvent event) {
      return event.getEventType().equals(HCatConstants.HCAT_DROP_DATABASE_EVENT);
    }
  };
  NotificationEventResponse response = msClient.getNextNotification(firstEventId, 0, dropOnly);
  assertEquals(1, response.getEventsSize());
  assertEquals(firstEventId + 3, response.getEvents().get(0).getEventId());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Alters an existing partition via the metastore client and verifies the third
 * recorded notification event is ALTER_PARTITION with the expected id, event
 * type, db/table names, and serialized JSON payload.
 */
@Test public void alterPartition() throws Exception {
List cols=new ArrayList();
cols.add(new FieldSchema("col1","int","nocomment"));
List partCols=new ArrayList();
partCols.add(new FieldSchema("ds","string",""));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,emptyParameters);
Table table=new Table("alterparttable","default","me",startTime,startTime,0,sd,partCols,emptyParameters,null,null,null);
msClient.createTable(table);
Partition partition=new Partition(Arrays.asList("today"),"default","alterparttable",startTime,startTime,sd,emptyParameters);
msClient.add_partition(partition);
// Same partition values with one timestamp field bumped, to trigger an alter.
Partition newPart=new Partition(Arrays.asList("today"),"default","alterparttable",startTime,startTime + 1,sd,emptyParameters);
msClient.alter_partition("default","alterparttable",newPart);
// Three events expected: create table, add partition, alter partition.
NotificationEventResponse rsp=msClient.getNextNotification(firstEventId,0,null);
assertEquals(3,rsp.getEventsSize());
NotificationEvent event=rsp.getEvents().get(2);
assertEquals(firstEventId + 3,event.getEventId());
assertTrue(event.getEventTime() >= startTime);
assertEquals(HCatConstants.HCAT_ALTER_PARTITION_EVENT,event.getEventType());
assertEquals("default",event.getDbName());
assertEquals("alterparttable",event.getTableName());
// Passing getMessage() as the assertion message shows the payload on failure.
assertTrue(event.getMessage(),event.getMessage().matches("\\{\"eventType\":\"ALTER_PARTITION\",\"server\":\"\"," + "\"servicePrincipal\":\"\",\"db\":\"default\",\"table\":\"alterparttable\"," + "\"timestamp\":[0-9]+,\"keyValues\":\\{\"ds\":\"today\"}}"));
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Fires an INSERT listener event against a partition through
 * fireListenerEvent and verifies the resulting notification: event id, type,
 * db/table names, file list, and partition key/values in the JSON payload.
 */
@Test public void insertPartition() throws Exception {
List cols=new ArrayList();
cols.add(new FieldSchema("col1","int","nocomment"));
List partCols=new ArrayList();
partCols.add(new FieldSchema("ds","string",""));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,emptyParameters);
Table table=new Table("insertPartition","default","me",startTime,startTime,0,sd,partCols,emptyParameters,null,null,null);
msClient.createTable(table);
Partition partition=new Partition(Arrays.asList("today"),"default","insertPartition",startTime,startTime,sd,emptyParameters);
msClient.add_partition(partition);
// Build an insert event naming one added file, targeted at the partition.
FireEventRequestData data=new FireEventRequestData();
InsertEventRequestData insertData=new InsertEventRequestData();
data.setInsertData(insertData);
insertData.addToFilesAdded("/warehouse/mytable/today/b1");
FireEventRequest rqst=new FireEventRequest(true,data);
rqst.setDbName("default");
rqst.setTableName("insertPartition");
rqst.setPartitionVals(Arrays.asList("today"));
msClient.fireListenerEvent(rqst);
// Three events expected: create table, add partition, insert.
NotificationEventResponse rsp=msClient.getNextNotification(firstEventId,0,null);
assertEquals(3,rsp.getEventsSize());
NotificationEvent event=rsp.getEvents().get(2);
assertEquals(firstEventId + 3,event.getEventId());
assertTrue(event.getEventTime() >= startTime);
assertEquals(HCatConstants.HCAT_INSERT_EVENT,event.getEventType());
assertEquals("default",event.getDbName());
assertEquals("insertPartition",event.getTableName());
// Passing getMessage() as the assertion message shows the payload on failure.
assertTrue(event.getMessage(),event.getMessage().matches("\\{\"eventType\":\"INSERT\",\"server\":\"\"," + "\"servicePrincipal\":\"\",\"db\":\"default\",\"table\":" + "\"insertPartition\",\"timestamp\":[0-9]+,"+ "\"files\":\\[\"/warehouse/mytable/today/b1\"],\"partKeyVals\":\\{\"ds\":\"today\"},"+ "\"partitionKeyValues\":\\{\"ds\":\"today\"}}"));
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Creating a table must emit exactly one CREATE_TABLE notification with the
 * expected id, timing, db/table names, and JSON payload.
 */
@Test public void createTable() throws Exception {
  List cols = new ArrayList();
  cols.add(new FieldSchema("col1", "int", "nocomment"));
  SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
  StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, emptyParameters);
  Table newTable = new Table("mytable", "default", "me", startTime, startTime, 0, sd, null, emptyParameters, null, null, null);
  msClient.createTable(newTable);
  NotificationEventResponse response = msClient.getNextNotification(firstEventId, 0, null);
  assertEquals(1, response.getEventsSize());
  NotificationEvent evt = response.getEvents().get(0);
  assertEquals(firstEventId + 1, evt.getEventId());
  assertTrue(evt.getEventTime() >= startTime);
  assertEquals(HCatConstants.HCAT_CREATE_TABLE_EVENT, evt.getEventType());
  assertEquals("default", evt.getDbName());
  assertEquals("mytable", evt.getTableName());
  assertTrue(evt.getMessage().matches("\\{\"eventType\":\"CREATE_TABLE\",\"server\":\"\"," + "\"servicePrincipal\":\"\",\"db\":\"default\",\"table\":\"mytable\",\"timestamp\":[0-9]+}"));
}
InternalCallVerifier EqualityVerifier
/**
 * A SQL create/drop database pair must produce a CREATE_DATABASE followed by a
 * DROP_DATABASE event, in that order, with consecutive event ids.
 */
@Test public void sqlDb() throws Exception {
  driver.run("create database sd");
  driver.run("drop database sd");
  NotificationEventResponse response = msClient.getNextNotification(firstEventId, 0, null);
  assertEquals(2, response.getEventsSize());
  NotificationEvent createEvt = response.getEvents().get(0);
  assertEquals(firstEventId + 1, createEvt.getEventId());
  assertEquals(HCatConstants.HCAT_CREATE_DATABASE_EVENT, createEvt.getEventType());
  NotificationEvent dropEvt = response.getEvents().get(1);
  assertEquals(firstEventId + 2, dropEvt.getEventId());
  assertEquals(HCatConstants.HCAT_DROP_DATABASE_EVENT, dropEvt.getEventType());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Dropping a table must record a DROP_TABLE notification (the second event,
 * after the create) with the expected id, timing, names, and JSON payload.
 */
@Test public void dropTable() throws Exception {
  List cols = new ArrayList();
  cols.add(new FieldSchema("col1", "int", "nocomment"));
  SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
  StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, emptyParameters);
  Table doomed = new Table("droptable", "default", "me", startTime, startTime, 0, sd, null, emptyParameters, null, null, null);
  msClient.createTable(doomed);
  msClient.dropTable("default", "droptable");
  NotificationEventResponse response = msClient.getNextNotification(firstEventId, 0, null);
  assertEquals(2, response.getEventsSize());
  NotificationEvent evt = response.getEvents().get(1);
  assertEquals(firstEventId + 2, evt.getEventId());
  assertTrue(evt.getEventTime() >= startTime);
  assertEquals(HCatConstants.HCAT_DROP_TABLE_EVENT, evt.getEventType());
  assertEquals("default", evt.getDbName());
  assertEquals("droptable", evt.getTableName());
  assertTrue(evt.getMessage().matches("\\{\"eventType\":\"DROP_TABLE\",\"server\":\"\"," + "\"servicePrincipal\":\"\",\"db\":\"default\",\"table\":" + "\"droptable\",\"timestamp\":[0-9]+}"));
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Fires an INSERT listener event against an unpartitioned table through
 * fireListenerEvent and verifies the resulting notification: event id, type,
 * db/table names, file list, and empty partition maps in the JSON payload.
 */
@Test public void insertTable() throws Exception {
List cols=new ArrayList();
cols.add(new FieldSchema("col1","int","nocomment"));
SerDeInfo serde=new SerDeInfo("serde","seriallib",null);
StorageDescriptor sd=new StorageDescriptor(cols,"file:/tmp","input","output",false,0,serde,null,null,emptyParameters);
Table table=new Table("insertTable","default","me",startTime,startTime,0,sd,null,emptyParameters,null,null,null);
msClient.createTable(table);
// Build an insert event naming one added file; no partition values are set.
FireEventRequestData data=new FireEventRequestData();
InsertEventRequestData insertData=new InsertEventRequestData();
data.setInsertData(insertData);
insertData.addToFilesAdded("/warehouse/mytable/b1");
FireEventRequest rqst=new FireEventRequest(true,data);
rqst.setDbName("default");
rqst.setTableName("insertTable");
msClient.fireListenerEvent(rqst);
// Two events expected: create table, then insert.
NotificationEventResponse rsp=msClient.getNextNotification(firstEventId,0,null);
assertEquals(2,rsp.getEventsSize());
NotificationEvent event=rsp.getEvents().get(1);
assertEquals(firstEventId + 2,event.getEventId());
assertTrue(event.getEventTime() >= startTime);
assertEquals(HCatConstants.HCAT_INSERT_EVENT,event.getEventType());
assertEquals("default",event.getDbName());
assertEquals("insertTable",event.getTableName());
// Passing getMessage() as the assertion message shows the payload on failure.
assertTrue(event.getMessage(),event.getMessage().matches("\\{\"eventType\":\"INSERT\",\"server\":\"\"," + "\"servicePrincipal\":\"\",\"db\":\"default\",\"table\":" + "\"insertTable\",\"timestamp\":[0-9]+,\"files\":\\[\"/warehouse/mytable/b1\"],"+ "\"partKeyVals\":\\{},\"partitionKeyValues\":\\{}}"));
}
InternalCallVerifier EqualityVerifier
/**
 * A NotificationFilter combines with the max-events cap: of the two
 * CREATE_DATABASE events that pass the filter, only the first is returned
 * when max is 1.
 */
@Test public void filterWithMax() throws Exception {
  msClient.createDatabase(new Database("f10", "no description", "file:/tmp", emptyParameters));
  msClient.createDatabase(new Database("f11", "no description", "file:/tmp", emptyParameters));
  msClient.dropDatabase("f11");
  // Accept only CREATE_DATABASE events.
  IMetaStoreClient.NotificationFilter createOnly = new IMetaStoreClient.NotificationFilter() {
    @Override public boolean accept(NotificationEvent event) {
      return event.getEventType().equals(HCatConstants.HCAT_CREATE_DATABASE_EVENT);
    }
  };
  NotificationEventResponse response = msClient.getNextNotification(firstEventId, 1, createOnly);
  assertEquals(1, response.getEventsSize());
  assertEquals(firstEventId + 1, response.getEvents().get(0).getEventId());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Exercises notification events for partitioned-table SQL: inserts into
 * static and dynamic partitions, an explicit add partition, and a drop
 * partition.  The statement sequence yields 19 events in total;
 * representative events are spot-checked by index and expected event id.
 * NOTE(review): the index spacing assumes an insert into a not-yet-existing
 * partition emits an ADD_PARTITION event followed by an INSERT event --
 * confirm against the notification listener implementation.
 */
@Test public void sqlInsertPartition() throws Exception {
driver.run("create table sip (c int) partitioned by (ds string)");
driver.run("insert into table sip partition (ds = 'today') values (1)");
driver.run("insert into table sip partition (ds = 'today') values (2)");
driver.run("insert into table sip partition (ds) values (3, 'today')");
driver.run("alter table sip add partition (ds = 'yesterday')");
driver.run("insert into table sip partition (ds = 'yesterday') values (2)");
driver.run("insert into table sip partition (ds) values (3, 'yesterday')");
driver.run("insert into table sip partition (ds) values (3, 'tomorrow')");
driver.run("alter table sip drop partition (ds = 'tomorrow')");
NotificationEventResponse rsp=msClient.getNextNotification(firstEventId,0,null);
// Dump every event at debug level to ease diagnosis when the count drifts.
for ( NotificationEvent ne : rsp.getEvents()) LOG.debug("EVENT: " + ne.getMessage());
assertEquals(19,rsp.getEventsSize());
// Event 1: partition 'today' implicitly added by the first insert.
NotificationEvent event=rsp.getEvents().get(1);
assertEquals(firstEventId + 2,event.getEventId());
assertEquals(HCatConstants.HCAT_ADD_PARTITION_EVENT,event.getEventType());
// Event 3: second insert into the now-existing 'today' partition.
event=rsp.getEvents().get(3);
assertEquals(firstEventId + 4,event.getEventId());
assertEquals(HCatConstants.HCAT_INSERT_EVENT,event.getEventType());
assertTrue(event.getMessage().matches(".*\"files\":\\[\"pfile.*"));
// Event 6: dynamic-partition insert into 'today'.
event=rsp.getEvents().get(6);
assertEquals(firstEventId + 7,event.getEventId());
assertEquals(HCatConstants.HCAT_INSERT_EVENT,event.getEventType());
assertTrue(event.getMessage().matches(".*\"files\":\\[\"pfile.*"));
// Event 9: explicit 'alter table ... add partition' for 'yesterday'.
event=rsp.getEvents().get(9);
assertEquals(firstEventId + 10,event.getEventId());
assertEquals(HCatConstants.HCAT_ADD_PARTITION_EVENT,event.getEventType());
// Event 10: insert into the pre-created 'yesterday' partition.
event=rsp.getEvents().get(10);
assertEquals(firstEventId + 11,event.getEventId());
assertEquals(HCatConstants.HCAT_INSERT_EVENT,event.getEventType());
assertTrue(event.getMessage().matches(".*\"files\":\\[\"pfile.*"));
// Event 13: dynamic-partition insert into 'yesterday'.
event=rsp.getEvents().get(13);
assertEquals(firstEventId + 14,event.getEventId());
assertEquals(HCatConstants.HCAT_INSERT_EVENT,event.getEventType());
assertTrue(event.getMessage().matches(".*\"files\":\\[\"pfile.*"));
// Event 16: partition 'tomorrow' added by the dynamic insert.
event=rsp.getEvents().get(16);
assertEquals(firstEventId + 17,event.getEventId());
assertEquals(HCatConstants.HCAT_ADD_PARTITION_EVENT,event.getEventType());
// Event 18: the final drop partition.
event=rsp.getEvents().get(18);
assertEquals(firstEventId + 19,event.getEventId());
assertEquals(HCatConstants.HCAT_DROP_PARTITION_EVENT,event.getEventType());
}
BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Dropping a database must emit a DROP_DATABASE notification carrying the
 * database name, a sane timestamp, no table name, and the expected JSON
 * message payload.
 */
@Test public void dropDatabase() throws Exception {
  Database toDrop = new Database("dropdb", "no description", "file:/tmp", emptyParameters);
  msClient.createDatabase(toDrop);
  msClient.dropDatabase("dropdb");
  NotificationEventResponse response = msClient.getNextNotification(firstEventId, 0, null);
  assertEquals(2, response.getEventsSize());
  // Event 0 is the creation; event 1 is the drop under test.
  NotificationEvent dropEvent = response.getEvents().get(1);
  assertEquals(firstEventId + 2, dropEvent.getEventId());
  assertTrue(dropEvent.getEventTime() >= startTime);
  assertEquals(HCatConstants.HCAT_DROP_DATABASE_EVENT, dropEvent.getEventType());
  assertEquals("dropdb", dropEvent.getDbName());
  assertNull(dropEvent.getTableName());
  assertTrue(dropEvent.getMessage().matches("\\{\"eventType\":\"DROP_DATABASE\",\"server\":\"\"," + "\"servicePrincipal\":\"\",\"db\":\"dropdb\",\"timestamp\":[0-9]+}"));
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies notification events for the basic table lifecycle driven by SQL:
 * create, insert, alter (add columns) and drop.  Six events are expected;
 * selected ones are checked by index and event id.
 */
@Test public void sqlInsertTable() throws Exception {
  driver.run("create table sit (c int)");
  driver.run("insert into table sit values (1)");
  driver.run("alter table sit add columns (c2 int)");
  driver.run("drop table sit");
  NotificationEventResponse response = msClient.getNextNotification(firstEventId, 0, null);
  assertEquals(6, response.getEventsSize());
  // Event 0: table creation.
  NotificationEvent current = response.getEvents().get(0);
  assertEquals(firstEventId + 1, current.getEventId());
  assertEquals(HCatConstants.HCAT_CREATE_TABLE_EVENT, current.getEventType());
  // Event 2: the insert; its message must reference the written files.
  current = response.getEvents().get(2);
  assertEquals(firstEventId + 3, current.getEventId());
  assertEquals(HCatConstants.HCAT_INSERT_EVENT, current.getEventType());
  assertTrue(current.getMessage().matches(".*\"files\":\\[\"pfile.*"));
  // Event 4: the alter.
  current = response.getEvents().get(4);
  assertEquals(firstEventId + 5, current.getEventId());
  assertEquals(HCatConstants.HCAT_ALTER_TABLE_EVENT, current.getEventType());
  // Event 5: the drop.
  current = response.getEvents().get(5);
  assertEquals(firstEventId + 6, current.getEventId());
  assertEquals(HCatConstants.HCAT_DROP_TABLE_EVENT, current.getEventType());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Creating a table and then altering it must emit a CREATE_TABLE event
 * followed by an ALTER_TABLE event carrying the table identity and a
 * well-formed JSON message.  Uses typed collections instead of the raw
 * List/ArrayList the original had (the thrift ctor expects
 * List&lt;FieldSchema&gt;).
 */
@Test public void alterTable() throws Exception {
  List<FieldSchema> cols = new ArrayList<FieldSchema>();
  cols.add(new FieldSchema("col1", "int", "nocomment"));
  SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
  StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, emptyParameters);
  Table table = new Table("alttable", "default", "me", startTime, startTime, 0, sd, new ArrayList<FieldSchema>(), emptyParameters, null, null, null);
  msClient.createTable(table);
  // Grow the schema by one column, then push the change through alter_table.
  cols.add(new FieldSchema("col2", "int", ""));
  table = new Table("alttable", "default", "me", startTime, startTime, 0, sd, new ArrayList<FieldSchema>(), emptyParameters, null, null, null);
  msClient.alter_table("default", "alttable", table);
  NotificationEventResponse rsp = msClient.getNextNotification(firstEventId, 0, null);
  assertEquals(2, rsp.getEventsSize());
  // Event 0 is the create; event 1 is the alter under test.
  NotificationEvent event = rsp.getEvents().get(1);
  assertEquals(firstEventId + 2, event.getEventId());
  assertTrue(event.getEventTime() >= startTime);
  assertEquals(HCatConstants.HCAT_ALTER_TABLE_EVENT, event.getEventType());
  assertEquals("default", event.getDbName());
  assertEquals("alttable", event.getTableName());
  assertTrue(event.getMessage().matches("\\{\"eventType\":\"ALTER_TABLE\",\"server\":\"\"," + "\"servicePrincipal\":\"\",\"db\":\"default\",\"table\":\"alttable\"," + "\"timestamp\":[0-9]+}"));
}
Class: org.apache.hive.hcatalog.listener.TestNotificationListener TestCleaner EqualityVerifier HybridVerifier
/**
 * After each test, verify that the notification listener delivered exactly
 * the expected sequence of HCat event types, in order, over the whole run.
 */
@After public void tearDown() throws Exception {
List expectedMessages=Arrays.asList(HCatConstants.HCAT_CREATE_DATABASE_EVENT,HCatConstants.HCAT_CREATE_TABLE_EVENT,HCatConstants.HCAT_ADD_PARTITION_EVENT,HCatConstants.HCAT_ALTER_PARTITION_EVENT,HCatConstants.HCAT_DROP_PARTITION_EVENT,HCatConstants.HCAT_ALTER_TABLE_EVENT,HCatConstants.HCAT_DROP_TABLE_EVENT,HCatConstants.HCAT_DROP_DATABASE_EVENT);
Assert.assertEquals(expectedMessages,actualMessages);
}
Class: org.apache.hive.hcatalog.mapreduce.TestHCatHiveCompatibility APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Round-trips an unpartitioned RCFile table: stores rows 0..10 through
 * HCatStorer, reads them back via HCatLoader, and verifies both the data
 * and that the table's input format stays HIVE_RCFILE_IF_CLASS.
 * FIX: the iterator must be typed Iterator&lt;Tuple&gt; -- with a raw
 * Iterator, next() returns Object and the assignment to Tuple does not
 * compile.
 */
@Test public void testUnpartedReadWrite() throws Exception {
  driver.run("drop table if exists junit_unparted_noisd");
  String createTable = "create table junit_unparted_noisd(a int) stored as RCFILE";
  Assert.assertEquals(0, driver.run(createTable).getResponseCode());
  Table table = client.getTable("default", "junit_unparted_noisd");
  // assertEquals gives a useful diff on failure, unlike assertTrue(equals).
  Assert.assertEquals(HCatConstants.HIVE_RCFILE_IF_CLASS, table.getSd().getInputFormat());
  PigServer server = new PigServer(ExecType.LOCAL);
  logAndRegister(server, "A = load '" + INPUT_FILE_NAME + "' as (a:int);");
  logAndRegister(server, "store A into 'default.junit_unparted_noisd' using org.apache.hive.hcatalog.pig.HCatStorer();");
  logAndRegister(server, "B = load 'default.junit_unparted_noisd' using org.apache.hive.hcatalog.pig.HCatLoader();");
  Iterator<Tuple> itr = server.openIterator("B");
  int i = 0;
  while (itr.hasNext()) {
    Tuple t = itr.next();
    Assert.assertEquals(1, t.size());
    Assert.assertEquals(t.get(0), i); // input rows are 0..10 in order
    i++;
  }
  Assert.assertFalse(itr.hasNext());
  Assert.assertEquals(11, i);
  Table table2 = client.getTable("default", "junit_unparted_noisd");
  Assert.assertEquals(HCatConstants.HIVE_RCFILE_IF_CLASS, table2.getSd().getInputFormat());
  driver.run("drop table junit_unparted_noisd");
}
APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Round-trips a partitioned RCFile table: stores rows into partition b=42
 * through HCatStorer, reads them back via HCatLoader, and verifies data,
 * table input format, and the created partition's input format.
 * FIXES: (1) Iterator must be typed Iterator&lt;Tuple&gt; or the Tuple
 * assignment does not compile; (2) the final cleanup dropped the WRONG
 * table (junit_unparted_noisd, which belongs to a sibling test) instead of
 * the junit_parted_noisd table this test created.
 */
@Test public void testPartedRead() throws Exception {
  driver.run("drop table if exists junit_parted_noisd");
  String createTable = "create table junit_parted_noisd(a int) partitioned by (b string) stored as RCFILE";
  Assert.assertEquals(0, driver.run(createTable).getResponseCode());
  Table table = client.getTable("default", "junit_parted_noisd");
  Assert.assertEquals(HCatConstants.HIVE_RCFILE_IF_CLASS, table.getSd().getInputFormat());
  PigServer server = new PigServer(ExecType.LOCAL);
  logAndRegister(server, "A = load '" + INPUT_FILE_NAME + "' as (a:int);");
  logAndRegister(server, "store A into 'default.junit_parted_noisd' using org.apache.hive.hcatalog.pig.HCatStorer('b=42');");
  logAndRegister(server, "B = load 'default.junit_parted_noisd' using org.apache.hive.hcatalog.pig.HCatLoader();");
  Iterator<Tuple> itr = server.openIterator("B");
  int i = 0;
  while (itr.hasNext()) {
    Tuple t = itr.next();
    Assert.assertEquals(2, t.size());
    Assert.assertEquals(t.get(0), i);     // data column: rows 0..10 in order
    Assert.assertEquals(t.get(1), "42");  // partition column appended by HCatLoader
    i++;
  }
  Assert.assertFalse(itr.hasNext());
  Assert.assertEquals(11, i);
  Table table2 = client.getTable("default", "junit_parted_noisd");
  Assert.assertEquals(HCatConstants.HIVE_RCFILE_IF_CLASS, table2.getSd().getInputFormat());
  Partition ptn = client.getPartition("default", "junit_parted_noisd", Arrays.asList("42"));
  Assert.assertNotNull(ptn);
  Assert.assertEquals(HCatConstants.HIVE_RCFILE_IF_CLASS, ptn.getSd().getInputFormat());
  // BUGFIX: was "drop table junit_unparted_noisd" -- drop the table we made.
  driver.run("drop table junit_parted_noisd");
}
Class: org.apache.hive.hcatalog.mapreduce.TestHCatHiveThriftCompatibility APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
* Create a table with no explicit schema and ensure its correctly
* discovered from the thrift struct.
*/
@Test public void testDynamicCols() throws Exception {
Assert.assertEquals(0,driver.run("drop table if exists test_thrift").getResponseCode());
Assert.assertEquals(0,driver.run("create external table test_thrift " + "partitioned by (year string) " + "row format serde 'org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer' "+ "with serdeproperties ( "+ " 'serialization.class'='org.apache.hadoop.hive.serde2.thrift.test.IntString', "+ " 'serialization.format'='org.apache.thrift.protocol.TBinaryProtocol') "+ "stored as"+ " inputformat 'org.apache.hadoop.mapred.SequenceFileInputFormat'"+ " outputformat 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'").getResponseCode());
Assert.assertEquals(0,driver.run("alter table test_thrift add partition (year = '2012') location '" + intStringSeq.getParent() + "'").getResponseCode());
PigServer pigServer=new PigServer(ExecType.LOCAL);
pigServer.registerQuery("A = load 'test_thrift' using org.apache.hive.hcatalog.pig.HCatLoader();");
Schema expectedSchema=new Schema();
expectedSchema.add(new Schema.FieldSchema("myint",DataType.INTEGER));
expectedSchema.add(new Schema.FieldSchema("mystring",DataType.CHARARRAY));
expectedSchema.add(new Schema.FieldSchema("underscore_int",DataType.INTEGER));
expectedSchema.add(new Schema.FieldSchema("year",DataType.CHARARRAY));
Assert.assertEquals(expectedSchema,pigServer.dumpSchema("A"));
Iterator iterator=pigServer.openIterator("A");
Tuple t=iterator.next();
Assert.assertEquals(1,t.get(0));
Assert.assertEquals("one",t.get(1));
Assert.assertEquals(1,t.get(2));
Assert.assertEquals("2012",t.get(3));
Assert.assertFalse(iterator.hasNext());
}
Class: org.apache.hive.hcatalog.mapreduce.TestHCatInputFormat TestInitializer InternalCallVerifier EqualityVerifier HybridVerifier
/**
* Create an input sequence file with 100 records; every 10th record is bad.
* Load this table into Hive.
*/
/**
 * One-time setup: writes an input sequence file with 100 records, every
 * 10th record deliberately unparseable ("bad record"), and loads it into a
 * thrift-deserialized Hive table.  Subsequent calls are no-ops.
 * FIX: the SequenceFile writer is now closed in a finally block so the
 * file handle is not leaked if an append throws.
 */
@Before @Override public void setUp() throws Exception {
  super.setUp();
  if (setUpComplete) {
    return; // already initialized by an earlier test in this JVM
  }
  Path intStringSeq = new Path(TEST_DATA_DIR + "/data/intString.seq");
  LOG.info("Creating data file: " + intStringSeq);
  SequenceFile.Writer seqFileWriter = SequenceFile.createWriter(intStringSeq.getFileSystem(hiveConf), hiveConf, intStringSeq, NullWritable.class, BytesWritable.class);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  TIOStreamTransport transport = new TIOStreamTransport(out);
  TBinaryProtocol protocol = new TBinaryProtocol(transport);
  try {
    for (int i = 1; i <= 100; i++) {
      if (i % 10 == 0) {
        // Every 10th record is garbage the thrift deserializer cannot parse.
        seqFileWriter.append(NullWritable.get(), new BytesWritable("bad record".getBytes()));
      } else {
        out.reset(); // reuse the buffer for each thrift-serialized record
        IntString intString = new IntString(i, Integer.toString(i), i);
        intString.write(protocol);
        BytesWritable bytesWritable = new BytesWritable(out.toByteArray());
        seqFileWriter.append(NullWritable.get(), bytesWritable);
      }
    }
  } finally {
    seqFileWriter.close(); // close even on failure so the handle is not leaked
  }
  Assert.assertEquals(0, driver.run("drop table if exists test_bad_records").getResponseCode());
  Assert.assertEquals(0, driver.run("create table test_bad_records " + "row format serde 'org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer' " + "with serdeproperties ( "+ " 'serialization.class'='org.apache.hadoop.hive.serde2.thrift.test.IntString', "+ " 'serialization.format'='org.apache.thrift.protocol.TBinaryProtocol') "+ "stored as"+ " inputformat 'org.apache.hadoop.mapred.SequenceFileInputFormat'"+ " outputformat 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'").getResponseCode());
  Assert.assertEquals(0, driver.run("load data local inpath '" + intStringSeq.getParent() + "' into table test_bad_records").getResponseCode());
  setUpComplete = true;
}
Class: org.apache.hive.hcatalog.mapreduce.TestHCatInputFormatMethods TestInitializer EqualityVerifier HybridVerifier
/**
 * One-time setup: create the partitioned table used by the
 * HCatInputFormat method tests.  Subsequent calls are no-ops.
 */
@Before @Override public void setUp() throws Exception {
  super.setUp();
  if (setUpComplete) {
    return; // table already created by an earlier test in this JVM
  }
  String dropStmt = "drop table if exists testHCIFMethods";
  String createStmt = "create table testHCIFMethods (a string, b int) partitioned by (x string, y string)";
  Assert.assertEquals(0, driver.run(dropStmt).getResponseCode());
  Assert.assertEquals(0, driver.run(createStmt).getResponseCode());
  setUpComplete = true;
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * setInput must publish both the data-column and partition-column schemas
 * into the job configuration; verify the names and types of each field.
 * Cleanup: assertTrue(x.equals(y)) / assertTrue(x != null) replaced with
 * assertEquals / assertNotNull (same pass/fail, far better failure output),
 * and expected values moved to the first argument per JUnit convention.
 */
@Test public void testGetPartitionAndDataColumns() throws Exception {
  Configuration conf = new Configuration();
  Job myJob = new Job(conf, "hcatTest");
  HCatInputFormat.setInput(myJob, "default", "testHCIFMethods");
  // Data columns: (a string, b int).
  HCatSchema cols = HCatInputFormat.getDataColumns(myJob.getConfiguration());
  Assert.assertNotNull(cols.getFields());
  Assert.assertEquals(2, cols.getFields().size());
  Assert.assertEquals("a", cols.getFields().get(0).getName());
  Assert.assertEquals("b", cols.getFields().get(1).getName());
  Assert.assertEquals(HCatFieldSchema.Type.STRING, cols.getFields().get(0).getType());
  Assert.assertEquals(HCatFieldSchema.Type.INT, cols.getFields().get(1).getType());
  // Partition columns: (x string, y string).
  HCatSchema pcols = HCatInputFormat.getPartitionColumns(myJob.getConfiguration());
  Assert.assertNotNull(pcols.getFields());
  Assert.assertEquals(2, pcols.getFields().size());
  Assert.assertEquals("x", pcols.getFields().get(0).getName());
  Assert.assertEquals("y", pcols.getFields().get(1).getName());
  Assert.assertEquals(HCatFieldSchema.Type.STRING, pcols.getFields().get(0).getType());
  Assert.assertEquals(HCatFieldSchema.Type.STRING, pcols.getFields().get(1).getType());
}
Class: org.apache.hive.hcatalog.mapreduce.TestHCatMultiOutputFormat IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
* Simple test case.
*
* Submits a mapred job which writes out one fixed line to each of the tables
* uses hive fetch task to read the data and see if it matches what was written
*
* @throws Exception if any error occurs
*/
@Test public void testOutputFormat() throws Throwable {
HashMap partitionValues=new HashMap();
partitionValues.put("ds","1");
partitionValues.put("cluster","ag");
ArrayList infoList=new ArrayList();
infoList.add(OutputJobInfo.create("default",tableNames[0],partitionValues));
infoList.add(OutputJobInfo.create("default",tableNames[1],partitionValues));
infoList.add(OutputJobInfo.create("default",tableNames[2],partitionValues));
Job job=new Job(hiveConf,"SampleJob");
job.setMapperClass(MyMapper.class);
job.setInputFormatClass(TextInputFormat.class);
job.setOutputFormatClass(MultiOutputFormat.class);
job.setNumReduceTasks(0);
JobConfigurer configurer=MultiOutputFormat.createConfigurer(job);
for (int i=0; i < tableNames.length; i++) {
configurer.addOutputFormat(tableNames[i],HCatOutputFormat.class,BytesWritable.class,HCatRecord.class);
HCatOutputFormat.setOutput(configurer.getJob(tableNames[i]),infoList.get(i));
HCatOutputFormat.setSchema(configurer.getJob(tableNames[i]),schemaMap.get(tableNames[i]));
}
configurer.configure();
Path filePath=createInputFile();
FileInputFormat.addInputPath(job,filePath);
Assert.assertTrue(job.waitForCompletion(true));
ArrayList outputs=new ArrayList();
for ( String tbl : tableNames) {
outputs.add(getTableData(tbl,"default").get(0));
}
Assert.assertEquals("Comparing output of table " + tableNames[0] + " is not correct",outputs.get(0),"a,a,1,ag");
Assert.assertEquals("Comparing output of table " + tableNames[1] + " is not correct",outputs.get(1),"a,1,ag");
Assert.assertEquals("Comparing output of table " + tableNames[2] + " is not correct",outputs.get(2),"a,a,extra,1,ag");
for (int i=0; i < tableNames.length; i++) {
Path partitionFile=new Path(warehousedir + "/" + tableNames[i]+ "/ds=1/cluster=ag/part-m-00000");
FileSystem fs=partitionFile.getFileSystem(mrConf);
Assert.assertEquals("File permissions of table " + tableNames[i] + " is not correct",fs.getFileStatus(partitionFile).getPermission(),new FsPermission(tablePerms[i]));
Assert.assertEquals("File permissions of table " + tableNames[i] + " is not correct",fs.getFileStatus(partitionFile.getParent()).getPermission(),new FsPermission(tablePerms[i]));
Assert.assertEquals("File permissions of table " + tableNames[i] + " is not correct",fs.getFileStatus(partitionFile.getParent().getParent()).getPermission(),new FsPermission(tablePerms[i]));
}
LOG.info("File permissions verified");
}
Class: org.apache.hive.hcatalog.mapreduce.TestHCatNonPartitioned APIUtilityVerifier BranchVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Exercises writes to a non-partitioned table: a second write must fail for
 * an immutable table (ERROR_NON_EMPTY_TABLE) but succeed for a mutable one,
 * and supplying partition values for an unpartitioned table must fail with
 * ERROR_INVALID_PARTITION_VALUES regardless of mutability.  Finally the row
 * count is verified via MR and a Hive read.
 */
@Test public void testHCatNonPartitionedTable() throws Exception {
Map partitionMap=new HashMap();
// First write into the empty table always succeeds (10 records).
runMRCreate(null,partitionColumns,writeRecords,10,true);
IOException exc=null;
try {
// Second write: succeeds only if the table is mutable.
Job j=runMRCreate(null,partitionColumns,writeRecords,20,true);
assertEquals(!isTableImmutable(),j.isSuccessful());
}
 catch ( IOException e) {
exc=e;
assertTrue(exc instanceof HCatException);
assertEquals(ErrorType.ERROR_NON_EMPTY_TABLE,((HCatException)exc).getErrorType());
}
// A mutable table must not have thrown at all.
if (!isTableImmutable()) {
assertNull(exc);
}
exc=null;
partitionMap.clear();
partitionMap.put("px","p1value2");
try {
// Partition values on an unpartitioned table are always invalid.
Job j=runMRCreate(partitionMap,partitionColumns,writeRecords,20,true);
assertFalse(j.isSuccessful());
}
 catch ( IOException e) {
exc=e;
assertTrue(exc != null);
assertTrue(exc instanceof HCatException);
assertEquals(ErrorType.ERROR_INVALID_PARTITION_VALUES,((HCatException)exc).getErrorType());
}
// Immutable tables keep only the first 10 records; mutable ones hold 30.
if (isTableImmutable()) {
runMRRead(10);
}
 else {
runMRRead(30);
}
hiveReadTest();
}
Class: org.apache.hive.hcatalog.mapreduce.TestHCatPartitionPublish BranchVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * When an MR write fails, no partition may be published: the partition list
 * must stay empty and (outside Windows, where FS semantics differ) the
 * partition directory must not exist.  Cleanup: typed collections replace
 * raw Map/List and assertTrue(x != null) becomes assertNotNull.
 */
@Test public void testPartitionPublish() throws Exception {
  String dbName = "default";
  String tableName = "testHCatPartitionedTable";
  createTable(null, tableName);
  Map<String, String> partitionMap = new HashMap<String, String>();
  partitionMap.put("part1", "p1value1");
  partitionMap.put("part0", "p0value1");
  // Convert the table's Hive field schemas to HCat field schemas.
  ArrayList hcatTableColumns = new ArrayList();
  for (FieldSchema fs : getTableColumns()) {
    hcatTableColumns.add(HCatSchemaUtils.getHCatFieldSchema(fs));
  }
  runMRCreateFail(dbName, tableName, partitionMap, hcatTableColumns);
  // The failed job must not have published any partition...
  List<String> ptns = msc.listPartitionNames(dbName, tableName, (short) 10);
  Assert.assertEquals(0, ptns.size());
  Table table = msc.getTable(dbName, tableName);
  Assert.assertNotNull(table);
  // ...nor left the partition directory behind on disk.
  if (!Shell.WINDOWS) {
    Path path = new Path(table.getSd().getLocation() + "/part1=p1value1/part0=p0value1");
    Assert.assertFalse(path.getFileSystem(conf).exists(path));
  }
}
Class: org.apache.hive.hcatalog.mapreduce.TestHCatPartitioned APIUtilityVerifier BranchVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * End-to-end write/read checks for a partitioned table: successful writes
 * into two partitions, duplicate-partition handling (fails only when the
 * table is immutable), missing and invalid partition keys, a dynamic-
 * partition write, and finally filtered reads plus schema/column-order and
 * Hive-read verification.
 */
@Test public void testHCatPartitionedTable() throws Exception {
Map partitionMap=new HashMap();
partitionMap.put("part1","p1value1");
partitionMap.put("part0","501");
// First partition: 10 records.
runMRCreate(partitionMap,partitionColumns,writeRecords,10,true);
partitionMap.clear();
// Upper-case keys on purpose: they must be treated case-insensitively.
partitionMap.put("PART1","p1value2");
partitionMap.put("PART0","502");
// Second partition: 20 records.
runMRCreate(partitionMap,partitionColumns,writeRecords,20,true);
IOException exc=null;
try {
// Writing the same partition again: allowed only on a mutable table.
Job j=runMRCreate(partitionMap,partitionColumns,writeRecords,20,true);
assertEquals(!isTableImmutable(),j.isSuccessful());
}
 catch ( IOException e) {
exc=e;
assertTrue(exc instanceof HCatException);
assertTrue(ErrorType.ERROR_DUPLICATE_PARTITION.equals(((HCatException)exc).getErrorType()));
}
// A mutable table must not have thrown at all.
if (!isTableImmutable()) {
assertNull(exc);
}
exc=null;
partitionMap.clear();
// Wrong key names: neither part0 nor part1 is supplied.
partitionMap.put("px1","p1value2");
partitionMap.put("px0","502");
try {
Job j=runMRCreate(partitionMap,partitionColumns,writeRecords,20,true);
assertFalse(j.isSuccessful());
}
 catch ( IOException e) {
exc=e;
assertNotNull(exc);
assertTrue(exc instanceof HCatException);
assertEquals(ErrorType.ERROR_MISSING_PARTITION_KEY,((HCatException)exc).getErrorType());
}
exc=null;
partitionMap.clear();
// Only one bogus key for a two-key partition spec: invalid values.
partitionMap.put("px","512");
try {
runMRCreate(partitionMap,partitionColumns,writeRecords,20,true);
}
 catch ( IOException e) {
exc=e;
}
assertNotNull(exc);
assertTrue(exc instanceof HCatException);
assertEquals(ErrorType.ERROR_INVALID_PARTITION_VALUES,((HCatException)exc).getErrorType());
exc=null;
try {
// Dynamic-partition write (null partition map) must succeed.
runMRCreate(null,partitionColumns,writeRecords,20,false);
}
 catch ( IOException e) {
exc=e;
}
assertTrue(exc == null);
// Immutable: 10 + 20; mutable: the duplicate write above added 20 more.
if (isTableImmutable()) {
runMRRead(30);
}
 else {
runMRRead(50);
}
// Filtered reads; counts again depend on whether the duplicate write stuck.
runMRRead(10,"part1 = \"p1value1\"");
runMRRead(10,"part0 = \"501\"");
if (isTableImmutable()) {
runMRRead(20,"part1 = \"p1value2\"");
runMRRead(30,"part1 = \"p1value1\" or part1 = \"p1value2\"");
runMRRead(20,"part0 = \"502\"");
runMRRead(30,"part0 = \"501\" or part0 = \"502\"");
}
 else {
runMRRead(40,"part1 = \"p1value2\"");
runMRRead(50,"part1 = \"p1value1\" or part1 = \"p1value2\"");
runMRRead(40,"part0 = \"502\"");
runMRRead(50,"part0 = \"501\" or part0 = \"502\"");
}
tableSchemaTest();
columnOrderChangeTest();
hiveReadTest();
}
Class: org.apache.hive.hcatalog.mapreduce.TestInputJobInfo APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * The 4-argument factory must carry database, table, filter and properties
 * through to the corresponding accessors unchanged.
 */
@Test public void test4ArgCreate() throws Exception {
  Properties props = new Properties();
  props.setProperty("key", "value");
  InputJobInfo info = InputJobInfo.create("Db", "Table", "Filter", props);
  Assert.assertEquals("Db", info.getDatabaseName());
  Assert.assertEquals("Table", info.getTableName());
  Assert.assertEquals("Filter", info.getFilter());
  Assert.assertEquals("value", info.getProperties().getProperty("key"));
}
Class: org.apache.hive.hcatalog.mapreduce.TestMultiOutputFormat APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
* A test job that reads a input file and outputs each word and the index of
* the word encountered to a text file and sequence file with different key
* values.
*/
@Test public void testMultiOutputFormatWithoutReduce() throws Throwable {
Job job=new Job(mrConf,"MultiOutNoReduce");
job.setMapperClass(MultiOutWordIndexMapper.class);
job.setJarByClass(this.getClass());
job.setInputFormatClass(TextInputFormat.class);
job.setOutputFormatClass(MultiOutputFormat.class);
job.setNumReduceTasks(0);
JobConfigurer configurer=MultiOutputFormat.createConfigurer(job);
configurer.addOutputFormat("out1",TextOutputFormat.class,IntWritable.class,Text.class);
configurer.addOutputFormat("out2",SequenceFileOutputFormat.class,Text.class,IntWritable.class);
Path outDir=new Path(workDir.getPath(),job.getJobName());
FileOutputFormat.setOutputPath(configurer.getJob("out1"),new Path(outDir,"out1"));
FileOutputFormat.setOutputPath(configurer.getJob("out2"),new Path(outDir,"out2"));
String fileContent="Hello World";
String inputFile=createInputFile(fileContent);
FileInputFormat.setInputPaths(job,new Path(inputFile));
DistributedCache.addFileToClassPath(new Path(inputFile),job.getConfiguration(),fs);
String dummyFile=createInputFile("dummy file");
DistributedCache.addFileToClassPath(new Path(dummyFile),configurer.getJob("out1").getConfiguration(),fs);
DistributedCache.addFileToClassPath(new Path(inputFile),configurer.getJob("out2").getConfiguration(),fs);
configurer.configure();
Path[] fileClassPaths=DistributedCache.getFileClassPaths(job.getConfiguration());
List fileClassPathsList=Arrays.asList(fileClassPaths);
Assert.assertTrue("Cannot find " + (new Path(inputFile)) + " in "+ fileClassPathsList,fileClassPathsList.contains(new Path(inputFile)));
Assert.assertTrue("Cannot find " + (new Path(dummyFile)) + " in "+ fileClassPathsList,fileClassPathsList.contains(new Path(dummyFile)));
URI[] cacheFiles=DistributedCache.getCacheFiles(job.getConfiguration());
List cacheFilesList=Arrays.asList(cacheFiles);
URI inputFileURI=new Path(inputFile).makeQualified(fs).toUri();
Assert.assertTrue("Cannot find " + inputFileURI + " in "+ cacheFilesList,cacheFilesList.contains(inputFileURI));
URI dummyFileURI=new Path(dummyFile).makeQualified(fs).toUri();
Assert.assertTrue("Cannot find " + dummyFileURI + " in "+ cacheFilesList,cacheFilesList.contains(dummyFileURI));
Assert.assertTrue(job.waitForCompletion(true));
Path textOutPath=new Path(outDir,"out1/part-m-00000");
String[] textOutput=readFully(textOutPath).split("\n");
Path seqOutPath=new Path(outDir,"out2/part-m-00000");
SequenceFile.Reader reader=new SequenceFile.Reader(fs,seqOutPath,mrConf);
Text key=new Text();
IntWritable value=new IntWritable();
String[] words=fileContent.split(" ");
Assert.assertEquals(words.length,textOutput.length);
LOG.info("Verifying file contents");
for (int i=0; i < words.length; i++) {
Assert.assertEquals((i + 1) + "\t" + words[i],textOutput[i]);
reader.next(key,value);
Assert.assertEquals(words[i],key.toString());
Assert.assertEquals((i + 1),value.get());
}
Assert.assertFalse(reader.next(key,value));
}
APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
* A word count test job that reads a input file and outputs the count of
* words to a text file and sequence file with different key values.
*/
@Test public void testMultiOutputFormatWithReduce() throws Throwable {
Job job=new Job(mrConf,"MultiOutWithReduce");
job.setMapperClass(WordCountMapper.class);
job.setReducerClass(MultiOutWordCountReducer.class);
job.setJarByClass(this.getClass());
job.setInputFormatClass(TextInputFormat.class);
job.setOutputFormatClass(MultiOutputFormat.class);
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(IntWritable.class);
JobConfigurer configurer=MultiOutputFormat.createConfigurer(job);
configurer.addOutputFormat("out1",TextOutputFormat.class,IntWritable.class,Text.class);
configurer.addOutputFormat("out2",SequenceFileOutputFormat.class,Text.class,IntWritable.class);
configurer.addOutputFormat("out3",NullOutputFormat.class,Text.class,IntWritable.class);
Path outDir=new Path(workDir.getPath(),job.getJobName());
FileOutputFormat.setOutputPath(configurer.getJob("out1"),new Path(outDir,"out1"));
FileOutputFormat.setOutputPath(configurer.getJob("out2"),new Path(outDir,"out2"));
configurer.configure();
String fileContent="Hello World Hello World World";
String inputFile=createInputFile(fileContent);
FileInputFormat.setInputPaths(job,new Path(inputFile));
Assert.assertTrue(job.waitForCompletion(true));
Path textOutPath=new Path(outDir,"out1/part-r-00000");
String[] textOutput=readFully(textOutPath).split("\n");
Path seqOutPath=new Path(outDir,"out2/part-r-00000");
SequenceFile.Reader reader=new SequenceFile.Reader(fs,seqOutPath,mrConf);
Text key=new Text();
IntWritable value=new IntWritable();
String[] words="Hello World".split(" ");
Assert.assertEquals(words.length,textOutput.length);
for (int i=0; i < words.length; i++) {
Assert.assertEquals((i + 2) + "\t" + words[i],textOutput[i]);
reader.next(key,value);
Assert.assertEquals(words[i],key.toString());
Assert.assertEquals((i + 2),value.get());
}
Assert.assertFalse(reader.next(key,value));
}
Class: org.apache.hive.hcatalog.mapreduce.TestSequenceFileReadWrite APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Writes the Pig relation into a SEQUENCEFILE-backed Hive table via
 * HCatStorer and reads it back with HCatLoader, verifying every row.
 * FIX: the iterator must be typed Iterator&lt;Tuple&gt; -- a raw Iterator's
 * next() returns Object and the Tuple assignment does not compile; also
 * assertTrue(retCode1 == 0) replaced with assertEquals for a useful
 * failure message.
 */
@Test public void testSequenceTableWriteRead() throws Exception {
  String createTable = "CREATE TABLE demo_table(a0 int, a1 String, a2 String) STORED AS SEQUENCEFILE";
  driver.run("drop table demo_table");
  int retCode1 = driver.run(createTable).getResponseCode();
  assertEquals(0, retCode1);
  server.setBatchOn();
  server.registerQuery("A = load '" + inputFileName + "' using PigStorage(',') as (a0:int,a1:chararray,a2:chararray);");
  server.registerQuery("store A into 'demo_table' using org.apache.hive.hcatalog.pig.HCatStorer();");
  server.executeBatch();
  server.registerQuery("B = load 'demo_table' using org.apache.hive.hcatalog.pig.HCatLoader();");
  Iterator<Tuple> XIter = server.openIterator("B");
  int numTuplesRead = 0;
  while (XIter.hasNext()) {
    Tuple t = XIter.next();
    assertEquals(3, t.size());
    // Input rows are (n, "a"+n, "b"+n) for n = 0..input.length-1, in order.
    assertEquals(t.get(0).toString(), "" + numTuplesRead);
    assertEquals(t.get(1).toString(), "a" + numTuplesRead);
    assertEquals(t.get(2).toString(), "b" + numTuplesRead);
    numTuplesRead++;
  }
  assertEquals(input.length, numTuplesRead);
}
APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Writes into a SEQUENCEFILE-backed table through a raw MR job using
 * HCatOutputFormat, then reads it back with HCatLoader and verifies every
 * row.  FIX: the iterator must be typed Iterator&lt;Tuple&gt; -- a raw
 * Iterator's next() returns Object and the Tuple assignment does not
 * compile; also assertTrue(retCode1 == 0) replaced with assertEquals.
 */
@Test public void testSequenceTableWriteReadMR() throws Exception {
  String createTable = "CREATE TABLE demo_table_2(a0 int, a1 String, a2 String) STORED AS SEQUENCEFILE";
  driver.run("drop table demo_table_2");
  int retCode1 = driver.run(createTable).getResponseCode();
  assertEquals(0, retCode1);
  Configuration conf = new Configuration();
  conf.set(HCatConstants.HCAT_KEY_HIVE_CONF, HCatUtil.serialize(hiveConf.getAllProperties()));
  Job job = new Job(conf, "Write-hcat-seq-table");
  job.setJarByClass(TestSequenceFileReadWrite.class);
  job.setMapperClass(Map.class);
  job.setOutputKeyClass(NullWritable.class);
  job.setOutputValueClass(DefaultHCatRecord.class);
  job.setInputFormatClass(TextInputFormat.class);
  TextInputFormat.setInputPaths(job, inputFileName);
  HCatOutputFormat.setOutput(job, OutputJobInfo.create(MetaStoreUtils.DEFAULT_DATABASE_NAME, "demo_table_2", null));
  job.setOutputFormatClass(HCatOutputFormat.class);
  HCatOutputFormat.setSchema(job, getSchema());
  job.setNumReduceTasks(0);
  assertTrue(job.waitForCompletion(true));
  // Pre-Hadoop-0.23 the committer must be driven explicitly.
  if (!HCatUtil.isHadoop23()) {
    new FileOutputCommitterContainer(job, null).commitJob(job);
  }
  assertTrue(job.isSuccessful());
  server.setBatchOn();
  server.registerQuery("C = load 'default.demo_table_2' using org.apache.hive.hcatalog.pig.HCatLoader();");
  server.executeBatch();
  Iterator<Tuple> XIter = server.openIterator("C");
  int numTuplesRead = 0;
  while (XIter.hasNext()) {
    Tuple t = XIter.next();
    assertEquals(3, t.size());
    // Input rows are (n, "a"+n, "b"+n) for n = 0..input.length-1, in order.
    assertEquals(t.get(0).toString(), "" + numTuplesRead);
    assertEquals(t.get(1).toString(), "a" + numTuplesRead);
    assertEquals(t.get(2).toString(), "b" + numTuplesRead);
    numTuplesRead++;
  }
  assertEquals(input.length, numTuplesRead);
}
APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Writes into a TEXTFILE-backed table through a raw MR job using
 * HCatOutputFormat, then reads it back with HCatLoader and verifies every
 * row.  FIX: the iterator must be typed Iterator&lt;Tuple&gt; -- a raw
 * Iterator's next() returns Object and the Tuple assignment does not
 * compile; also assertTrue(retCode1 == 0) replaced with assertEquals.
 */
@Test public void testTextTableWriteReadMR() throws Exception {
  String createTable = "CREATE TABLE demo_table_3(a0 int, a1 String, a2 String) STORED AS TEXTFILE";
  driver.run("drop table demo_table_3");
  int retCode1 = driver.run(createTable).getResponseCode();
  assertEquals(0, retCode1);
  Configuration conf = new Configuration();
  conf.set(HCatConstants.HCAT_KEY_HIVE_CONF, HCatUtil.serialize(hiveConf.getAllProperties()));
  Job job = new Job(conf, "Write-hcat-text-table");
  job.setJarByClass(TestSequenceFileReadWrite.class);
  job.setMapperClass(Map.class);
  job.setOutputKeyClass(NullWritable.class);
  job.setOutputValueClass(DefaultHCatRecord.class);
  job.setInputFormatClass(TextInputFormat.class);
  job.setNumReduceTasks(0);
  TextInputFormat.setInputPaths(job, inputFileName);
  HCatOutputFormat.setOutput(job, OutputJobInfo.create(MetaStoreUtils.DEFAULT_DATABASE_NAME, "demo_table_3", null));
  job.setOutputFormatClass(HCatOutputFormat.class);
  HCatOutputFormat.setSchema(job, getSchema());
  assertTrue(job.waitForCompletion(true));
  // Pre-Hadoop-0.23 the committer must be driven explicitly.
  if (!HCatUtil.isHadoop23()) {
    new FileOutputCommitterContainer(job, null).commitJob(job);
  }
  assertTrue(job.isSuccessful());
  server.setBatchOn();
  server.registerQuery("D = load 'default.demo_table_3' using org.apache.hive.hcatalog.pig.HCatLoader();");
  server.executeBatch();
  Iterator<Tuple> XIter = server.openIterator("D");
  int numTuplesRead = 0;
  while (XIter.hasNext()) {
    Tuple t = XIter.next();
    assertEquals(3, t.size());
    // Input rows are (n, "a"+n, "b"+n) for n = 0..input.length-1, in order.
    assertEquals(t.get(0).toString(), "" + numTuplesRead);
    assertEquals(t.get(1).toString(), "a" + numTuplesRead);
    assertEquals(t.get(2).toString(), "b" + numTuplesRead);
    numTuplesRead++;
  }
  assertEquals(input.length, numTuplesRead);
}
APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Pig-only round trip for a TEXTFILE table: loads the shared CSV input, stores
 * it into demo_table_1 with HCatStorer, reads it back with HCatLoader and
 * compares every tuple against the generated values.
 */
@Test public void testTextTableWriteRead() throws Exception {
  String createTable = "CREATE TABLE demo_table_1(a0 int, a1 String, a2 String) STORED AS TEXTFILE";
  driver.run("drop table demo_table_1");
  int retCode1 = driver.run(createTable).getResponseCode();
  assertEquals(0, retCode1);
  server.setBatchOn();
  server.registerQuery("A = load '" + inputFileName + "' using PigStorage(',') as (a0:int,a1:chararray,a2:chararray);");
  server.registerQuery("store A into 'demo_table_1' using org.apache.hive.hcatalog.pig.HCatStorer();");
  server.executeBatch();
  server.registerQuery("B = load 'demo_table_1' using org.apache.hive.hcatalog.pig.HCatLoader();");
  // Typed iterator (was raw): HCatLoader yields Pig Tuples.
  Iterator<Tuple> XIter = server.openIterator("B");
  int numTuplesRead = 0;
  while (XIter.hasNext()) {
    Tuple t = XIter.next();
    assertEquals(3, t.size());
    // Expected-first argument order so failure messages read correctly.
    assertEquals("" + numTuplesRead, t.get(0).toString());
    assertEquals("a" + numTuplesRead, t.get(1).toString());
    assertEquals("b" + numTuplesRead, t.get(2).toString());
    numTuplesRead++;
  }
  assertEquals(input.length, numTuplesRead);
}
Class: org.apache.hive.hcatalog.pig.TestHCatLoader APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier AssumptionSetter EqualityVerifier NullVerifier HybridVerifier
/**
 * Verifies column projection through HCatLoader: Y2 projects (a) and Y3
 * projects (b, a) out of BASIC_TABLE. Checks both the dumped Pig schemas
 * (aliases and types) and the actual tuple values against basicInputData.
 */
@Test public void testProjectionsBasic() throws IOException {
  assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
  PigServer server = new PigServer(ExecType.LOCAL);
  server.registerQuery("Y1 = load '" + BASIC_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
  server.registerQuery("Y2 = foreach Y1 generate a;");
  server.registerQuery("Y3 = foreach Y1 generate b,a;");
  Schema dumpedY2Schema = server.dumpSchema("Y2");
  Schema dumpedY3Schema = server.dumpSchema("Y3");
  // Typed field lists (were raw): Schema.getFields() returns FieldSchemas.
  List<Schema.FieldSchema> Y2fields = dumpedY2Schema.getFields();
  List<Schema.FieldSchema> Y3fields = dumpedY3Schema.getFields();
  assertEquals(1, Y2fields.size());
  assertEquals("a", Y2fields.get(0).alias.toLowerCase());
  assertEquals(DataType.INTEGER, Y2fields.get(0).type);
  assertEquals(2, Y3fields.size());
  assertEquals("b", Y3fields.get(0).alias.toLowerCase());
  assertEquals(DataType.CHARARRAY, Y3fields.get(0).type);
  assertEquals("a", Y3fields.get(1).alias.toLowerCase());
  assertEquals(DataType.INTEGER, Y3fields.get(1).type);
  int numTuplesRead = 0;
  Iterator<Tuple> Y2Iter = server.openIterator("Y2");
  while (Y2Iter.hasNext()) {
    Tuple t = Y2Iter.next();
    assertEquals(1, t.size());
    assertNotNull(t.get(0));
    assertTrue(t.get(0).getClass() == Integer.class);
    assertEquals(basicInputData.get(numTuplesRead).first, t.get(0));
    numTuplesRead++;
  }
  numTuplesRead = 0;
  Iterator<Tuple> Y3Iter = server.openIterator("Y3");
  while (Y3Iter.hasNext()) {
    Tuple t = Y3Iter.next();
    assertEquals(2, t.size());
    // Projection order is (b, a): string first, then int.
    assertNotNull(t.get(0));
    assertTrue(t.get(0).getClass() == String.class);
    assertEquals(basicInputData.get(numTuplesRead).second, t.get(0));
    assertNotNull(t.get(1));
    assertTrue(t.get(1).getClass() == Integer.class);
    assertEquals(basicInputData.get(numTuplesRead).first, t.get(1));
    numTuplesRead++;
  }
  assertEquals(basicInputData.size(), numTuplesRead);
}
APIUtilityVerifier InternalCallVerifier AssumptionSetter EqualityVerifier HybridVerifier
/**
 * Grows a part file under SPECIFIC_SIZE_TABLE to exactly 2 GB and checks that
 * HCatLoader.getStatistics reports the table input size as 2048 MB.
 */
@Test public void testGetInputBytes() throws Exception {
  assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
  File partFile = new File(TEST_WAREHOUSE_DIR + "/" + SPECIFIC_SIZE_TABLE + "/part-m-00000");
  partFile.deleteOnExit();
  // Extend the file to 2 GB without writing any payload bytes.
  RandomAccessFile sizer = new RandomAccessFile(partFile, "rw");
  sizer.setLength(2L * 1024 * 1024 * 1024);
  sizer.close();
  HCatLoader loader = new HCatLoader();
  Job job = new Job();
  loader.setUDFContextSignature("testGetInputBytes");
  loader.setLocation(SPECIFIC_SIZE_TABLE, job);
  ResourceStatistics stats = loader.getStatistics(partFile.getAbsolutePath(), job);
  assertEquals(2048, (long) stats.getmBytes());
}
BooleanVerifier InternalCallVerifier AssumptionSetter EqualityVerifier NullVerifier HybridVerifier
/**
 * Checks HCatLoader's Pig schema for COMPLEX_TABLE: string/int scalars, a
 * struct surfaced as TUPLE, arrays surfaced as BAGs of TUPLEs, and a map as
 * MAP — including nested field aliases and types.
 */
@Test public void testSchemaLoadComplex() throws IOException {
  assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
  PigServer server = new PigServer(ExecType.LOCAL);
  server.registerQuery("K = load '" + COMPLEX_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
  Schema dumpedKSchema = server.dumpSchema("K");
  // Typed field list (was raw): Schema.getFields() returns FieldSchemas.
  List<Schema.FieldSchema> Kfields = dumpedKSchema.getFields();
  assertEquals(6, Kfields.size());
  assertEquals(DataType.CHARARRAY, Kfields.get(0).type);
  assertEquals("name", Kfields.get(0).alias.toLowerCase());
  assertEquals(DataType.INTEGER, Kfields.get(1).type);
  assertEquals("studentid", Kfields.get(1).alias.toLowerCase());
  assertEquals(DataType.TUPLE, Kfields.get(2).type);
  assertEquals("contact", Kfields.get(2).alias.toLowerCase());
  {
    // "contact": a struct with two string fields (phno, email).
    assertNotNull(Kfields.get(2).schema);
    assertTrue(Kfields.get(2).schema.getFields().size() == 2);
    assertTrue(Kfields.get(2).schema.getFields().get(0).type == DataType.CHARARRAY);
    assertTrue(Kfields.get(2).schema.getFields().get(0).alias.equalsIgnoreCase("phno"));
    assertTrue(Kfields.get(2).schema.getFields().get(1).type == DataType.CHARARRAY);
    assertTrue(Kfields.get(2).schema.getFields().get(1).alias.equalsIgnoreCase("email"));
  }
  assertEquals(DataType.BAG, Kfields.get(3).type);
  assertEquals("currently_registered_courses", Kfields.get(3).alias.toLowerCase());
  {
    // A Hive array surfaces as a bag of single-field chararray tuples.
    assertNotNull(Kfields.get(3).schema);
    assertEquals(1, Kfields.get(3).schema.getFields().size());
    assertEquals(DataType.TUPLE, Kfields.get(3).schema.getFields().get(0).type);
    assertNotNull(Kfields.get(3).schema.getFields().get(0).schema);
    assertEquals(1, Kfields.get(3).schema.getFields().get(0).schema.getFields().size());
    assertEquals(DataType.CHARARRAY, Kfields.get(3).schema.getFields().get(0).schema.getFields().get(0).type);
  }
  assertEquals(DataType.MAP, Kfields.get(4).type);
  assertEquals("current_grades", Kfields.get(4).alias.toLowerCase());
  assertEquals(DataType.BAG, Kfields.get(5).type);
  assertEquals("phnos", Kfields.get(5).alias.toLowerCase());
  {
    // "phnos": bag of two-field tuples (phno, type), both chararray.
    assertNotNull(Kfields.get(5).schema);
    assertEquals(1, Kfields.get(5).schema.getFields().size());
    assertEquals(DataType.TUPLE, Kfields.get(5).schema.getFields().get(0).type);
    assertNotNull(Kfields.get(5).schema.getFields().get(0).schema);
    assertTrue(Kfields.get(5).schema.getFields().get(0).schema.getFields().size() == 2);
    assertEquals(DataType.CHARARRAY, Kfields.get(5).schema.getFields().get(0).schema.getFields().get(0).type);
    assertEquals("phno", Kfields.get(5).schema.getFields().get(0).schema.getFields().get(0).alias.toLowerCase());
    assertEquals(DataType.CHARARRAY, Kfields.get(5).schema.getFields().get(0).schema.getFields().get(1).type);
    assertEquals("type", Kfields.get(5).schema.getFields().get(0).schema.getFields().get(1).alias.toLowerCase());
  }
}
BooleanVerifier InternalCallVerifier AssumptionSetter EqualityVerifier HybridVerifier
/**
 * Checks HCatLoader's Pig schema for BASIC_TABLE: exactly two fields,
 * (a:int, b:chararray).
 */
@Test public void testSchemaLoadBasic() throws IOException {
  assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
  PigServer server = new PigServer(ExecType.LOCAL);
  server.registerQuery("X = load '" + BASIC_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
  Schema dumpedXSchema = server.dumpSchema("X");
  // Typed field list (was raw): Schema.getFields() returns FieldSchemas.
  List<Schema.FieldSchema> Xfields = dumpedXSchema.getFields();
  assertEquals(2, Xfields.size());
  assertTrue(Xfields.get(0).alias.equalsIgnoreCase("a"));
  assertTrue(Xfields.get(0).type == DataType.INTEGER);
  assertTrue(Xfields.get(1).alias.equalsIgnoreCase("b"));
  assertTrue(Xfields.get(1).type == DataType.CHARARRAY);
}
APIUtilityVerifier InternalCallVerifier AssumptionSetter EqualityVerifier HybridVerifier
/**
 * Regression test for the failure case caused by HIVE-10752: a self-join of
 * COMPLEX_TABLE (loaded twice via HCatLoader) followed by a projection of
 * B::studentid must still return the correct, ordered values.
 * @throws Exception
 */
@Test public void testColumnarStorePushdown2() throws Exception {
  assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
  PigServer server = new PigServer(ExecType.LOCAL);
  server.registerQuery("A = load '" + COMPLEX_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
  server.registerQuery("B = load '" + COMPLEX_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
  server.registerQuery("C = join A by name, B by name;");
  server.registerQuery("D = foreach C generate B::studentid;");
  server.registerQuery("E = ORDER D by studentid asc;");
  // Typed iterator (was raw): HCatLoader yields Pig Tuples.
  Iterator<Tuple> iter = server.openIterator("E");
  Tuple t = iter.next();
  assertEquals(42, t.get(0));
  t = iter.next();
  assertEquals(1337, t.get(0));
}
APIUtilityVerifier IterativeVerifier BranchVerifier BooleanVerifier InternalCallVerifier AssumptionSetter EqualityVerifier NullVerifier HybridVerifier
@Test public void testReadPartitionedBasic() throws IOException, CommandNeedRetryException {
assumeTrue(!TestUtil.shouldSkip(storageFormat,DISABLED_STORAGE_FORMATS));
PigServer server=new PigServer(ExecType.LOCAL);
driver.run("select * from " + PARTITIONED_TABLE);
ArrayList valuesReadFromHiveDriver=new ArrayList();
driver.getResults(valuesReadFromHiveDriver);
assertEquals(basicInputData.size(),valuesReadFromHiveDriver.size());
server.registerQuery("W = load '" + PARTITIONED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
Schema dumpedWSchema=server.dumpSchema("W");
List Wfields=dumpedWSchema.getFields();
assertEquals(3,Wfields.size());
assertTrue(Wfields.get(0).alias.equalsIgnoreCase("a"));
assertTrue(Wfields.get(0).type == DataType.INTEGER);
assertTrue(Wfields.get(1).alias.equalsIgnoreCase("b"));
assertTrue(Wfields.get(1).type == DataType.CHARARRAY);
assertTrue(Wfields.get(2).alias.equalsIgnoreCase("bkt"));
assertTrue(Wfields.get(2).type == DataType.CHARARRAY);
Iterator WIter=server.openIterator("W");
Collection> valuesRead=new ArrayList>();
while (WIter.hasNext()) {
Tuple t=WIter.next();
assertTrue(t.size() == 3);
assertNotNull(t.get(0));
assertNotNull(t.get(1));
assertNotNull(t.get(2));
assertTrue(t.get(0).getClass() == Integer.class);
assertTrue(t.get(1).getClass() == String.class);
assertTrue(t.get(2).getClass() == String.class);
valuesRead.add(new Pair((Integer)t.get(0),(String)t.get(1)));
if ((Integer)t.get(0) < 2) {
assertEquals("0",t.get(2));
}
else {
assertEquals("1",t.get(2));
}
}
assertEquals(valuesReadFromHiveDriver.size(),valuesRead.size());
server.registerQuery("P1 = load '" + PARTITIONED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
server.registerQuery("P1filter = filter P1 by bkt == '0';");
Iterator P1Iter=server.openIterator("P1filter");
int count1=0;
while (P1Iter.hasNext()) {
Tuple t=P1Iter.next();
assertEquals("0",t.get(2));
assertEquals(1,t.get(0));
count1++;
}
assertEquals(3,count1);
server.registerQuery("P2 = load '" + PARTITIONED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
server.registerQuery("P2filter = filter P2 by bkt == '1';");
Iterator P2Iter=server.openIterator("P2filter");
int count2=0;
while (P2Iter.hasNext()) {
Tuple t=P2Iter.next();
assertEquals("1",t.get(2));
assertTrue(((Integer)t.get(0)) > 1);
count2++;
}
assertEquals(6,count2);
}
APIUtilityVerifier BranchVerifier BooleanVerifier InternalCallVerifier AssumptionSetter EqualityVerifier HybridVerifier
/**
 * With HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER enabled, a Hive boolean column is
 * exposed to Pig as INTEGER (1/0); if the Pig version natively supports
 * booleans the dumped schema reports BOOLEAN instead.
 */
@Test public void testConvertBooleanToInt() throws Exception {
  assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
  String tbl = "test_convert_boolean_to_int";
  String inputFileName = TEST_DATA_DIR + "/testConvertBooleanToInt/data.txt";
  File inputDataDir = new File(inputFileName).getParentFile();
  inputDataDir.mkdir();
  String[] lines = new String[]{"llama\ttrue", "alpaca\tfalse"};
  HcatTestUtils.createTestDataFile(inputFileName, lines);
  assertEquals(0, driver.run("drop table if exists " + tbl).getResponseCode());
  assertEquals(0, driver.run("create external table " + tbl + " (a string, b boolean) row format delimited fields terminated by '\t'" + " stored as textfile location 'file:///" + inputDataDir.getPath().replaceAll("\\\\", "/") + "'").getResponseCode());
  Properties properties = new Properties();
  properties.setProperty(HCatConstants.HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER, "true");
  PigServer server = new PigServer(ExecType.LOCAL, properties);
  server.registerQuery("data = load 'test_convert_boolean_to_int' using org.apache.hive.hcatalog.pig.HCatLoader();");
  Schema schema = server.dumpSchema("data");
  assertEquals(2, schema.getFields().size());
  assertEquals("a", schema.getField(0).alias);
  assertEquals(DataType.CHARARRAY, schema.getField(0).type);
  assertEquals("b", schema.getField(1).alias);
  if (PigHCatUtil.pigHasBooleanSupport()) {
    assertEquals(DataType.BOOLEAN, schema.getField(1).type);
  }
  else {
    assertEquals(DataType.INTEGER, schema.getField(1).type);
  }
  // Typed iterator (was raw): HCatLoader yields Pig Tuples.
  Iterator<Tuple> iterator = server.openIterator("data");
  Tuple t = iterator.next();
  assertEquals("llama", t.get(0));
  assertEquals(1, t.get(1));
  t = iterator.next();
  assertEquals("alpaca", t.get(0));
  assertEquals(0, t.get(1));
  assertFalse(iterator.hasNext());
}
APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier AssumptionSetter EqualityVerifier NullVerifier HybridVerifier
/**
 * Reads BASIC_TABLE through HCatLoader and verifies each (Integer, String)
 * tuple matches basicInputData in order, and that the total count agrees.
 */
@Test public void testReadDataBasic() throws IOException {
  assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
  PigServer server = new PigServer(ExecType.LOCAL);
  server.registerQuery("X = load '" + BASIC_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
  // Typed iterator (was raw): HCatLoader yields Pig Tuples.
  Iterator<Tuple> XIter = server.openIterator("X");
  int numTuplesRead = 0;
  while (XIter.hasNext()) {
    Tuple t = XIter.next();
    assertEquals(2, t.size());
    assertNotNull(t.get(0));
    assertNotNull(t.get(1));
    assertTrue(t.get(0).getClass() == Integer.class);
    assertTrue(t.get(1).getClass() == String.class);
    assertEquals(basicInputData.get(numTuplesRead).first, t.get(0));
    assertEquals(basicInputData.get(numTuplesRead).second, t.get(1));
    numTuplesRead++;
  }
  assertEquals(basicInputData.size(), numTuplesRead);
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Runs a standalone Pig script that projects (name, studentid) from
 * COMPLEX_TABLE and verifies HCatLoader pushed the column projection "0,1"
 * into the job configuration. Cleans up the script file and output directory.
 */
@Test public void testColumnarStorePushdown() throws Exception {
  final String pigOutputDir = TEST_DATA_DIR + "/colpushdownop";
  final String pigScriptName = "test.pig";
  final String expectedCols = "0,1";
  // Write the Pig script that exercises the projection.
  PrintWriter script = new PrintWriter(new FileWriter(pigScriptName));
  script.println("A = load '" + COMPLEX_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
  script.println("B = foreach A generate name,studentid;");
  script.println("C = filter B by name is not null;");
  script.println("store C into '" + pigOutputDir + "' using PigStorage();");
  script.close();
  try {
    PigStats stats = PigRunner.run(new String[]{"-x", "local", pigScriptName}, null);
    assertTrue(stats.isSuccessful());
    OutputStats outputStats = stats.getOutputStats().get(0);
    assertTrue(outputStats != null);
    // Only columns 0 and 1 should have been requested from the loader.
    assertEquals(expectedCols, outputStats.getConf().get(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR));
    FileSystem fs = FileSystem.get(outputStats.getConf());
    Path outputPath = new Path(pigOutputDir);
    if (fs.exists(outputPath)) {
      fs.delete(outputPath, true);
    }
  }
  finally {
    new File(pigScriptName).delete();
  }
}
Class: org.apache.hive.hcatalog.pig.TestHCatLoaderEncryption APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier AssumptionSetter EqualityVerifier NullVerifier HybridVerifier
/**
 * Reads ENCRYPTED_TABLE through HCatLoader in Pig and verifies each
 * (Integer, String) tuple matches basicInputData in order; the final count
 * assertion carries the storage format in its failure message.
 */
@Test public void testReadDataFromEncryptedHiveTableByPig() throws IOException {
  assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
  PigServer server = new PigServer(ExecType.LOCAL);
  server.registerQuery("X = load '" + ENCRYPTED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
  // Typed iterator (was raw): HCatLoader yields Pig Tuples.
  Iterator<Tuple> XIter = server.openIterator("X");
  int numTuplesRead = 0;
  while (XIter.hasNext()) {
    Tuple t = XIter.next();
    assertEquals(2, t.size());
    assertNotNull(t.get(0));
    assertNotNull(t.get(1));
    assertTrue(t.get(0).getClass() == Integer.class);
    assertTrue(t.get(1).getClass() == String.class);
    assertEquals(basicInputData.get(numTuplesRead).first, t.get(0));
    assertEquals(basicInputData.get(numTuplesRead).second, t.get(1));
    numTuplesRead++;
  }
  assertEquals("failed with storage format: " + this.storageFormat, basicInputData.size(), numTuplesRead);
}
BooleanVerifier InternalCallVerifier AssumptionSetter EqualityVerifier NullVerifier HybridVerifier
/**
 * Reads ENCRYPTED_TABLE through a map-only MapReduce job using HCatInputFormat.
 * The MapRead mapper collects rows into the shared static readRecords list,
 * which is then compared record-by-record against basicInputData.
 */
@Test public void testReadDataFromEncryptedHiveTableByHCatMR() throws Exception {
assumeTrue(!TestUtil.shouldSkip(storageFormat,DISABLED_STORAGE_FORMATS));
// The mapper appends into this shared static list; reset it between runs.
readRecords.clear();
Configuration conf=new Configuration();
Job job=new Job(conf,"hcat mapreduce read encryption test");
job.setJarByClass(this.getClass());
job.setMapperClass(TestHCatLoaderEncryption.MapRead.class);
job.setInputFormatClass(HCatInputFormat.class);
job.setOutputFormatClass(TextOutputFormat.class);
HCatInputFormat.setInput(job,MetaStoreUtils.DEFAULT_DATABASE_NAME,ENCRYPTED_TABLE,null);
job.setMapOutputKeyClass(BytesWritable.class);
job.setMapOutputValueClass(Text.class);
// Map-only job: records are captured by the mapper, not by a reducer.
job.setNumReduceTasks(0);
// Remove output from any previous run so the job can create it fresh.
FileSystem fs=new LocalFileSystem();
String pathLoc=TEST_DATA_DIR + "/testHCatMREncryptionOutput";
Path path=new Path(pathLoc);
if (fs.exists(path)) {
fs.delete(path,true);
}
TextOutputFormat.setOutputPath(job,new Path(WindowsPathUtil.getHdfsUriString(pathLoc)));
job.waitForCompletion(true);
// Verify every collected record against the expected (Integer, String) pairs.
int numTuplesRead=0;
for ( HCatRecord hCatRecord : readRecords) {
assertEquals(2,hCatRecord.size());
assertNotNull(hCatRecord.get(0));
assertNotNull(hCatRecord.get(1));
assertTrue(hCatRecord.get(0).getClass() == Integer.class);
assertTrue(hCatRecord.get(1).getClass() == String.class);
assertEquals(hCatRecord.get(0),basicInputData.get(numTuplesRead).first);
assertEquals(hCatRecord.get(1),basicInputData.get(numTuplesRead).second);
numTuplesRead++;
}
assertEquals("failed HCat MR read with storage format: " + this.storageFormat,basicInputData.size(),numTuplesRead);
}
Class: org.apache.hive.hcatalog.pig.TestHCatLoaderStorer APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Test round trip of smallint/tinyint: Hive->Pig->Hive. This is a more general use case in HCatalog:
 * 'read some data from Hive, process it in Pig, write result back to a Hive table'.
 * Input is a single row "40\t1"; Pig adds the two columns, so the stored sum
 * must be 41.
 */
@Test public void testReadWrite() throws Exception {
final String tblName="small_ints_table";
final String tblName2="pig_hcatalog_1";
File dataDir=new File(TEST_DATA_DIR + File.separator + "testReadWrite");
// Start from a clean data directory for the external source table.
FileUtil.fullyDelete(dataDir);
Assert.assertTrue(dataDir.mkdir());
final String INPUT_FILE_NAME=dataDir + "/inputtrw.data";
TestHCatLoader.dropTable(tblName,driver);
HcatTestUtils.createTestDataFile(INPUT_FILE_NAME,new String[]{"40\t1"});
TestHCatLoader.executeStatementOnDriver("create external table " + tblName + " (my_small_int smallint, my_tiny_int tinyint)"+ " row format delimited fields terminated by '\t' stored as textfile location '"+ dataDir.toURI().getPath()+ "'",driver);
TestHCatLoader.dropTable(tblName2,driver);
// Destination table for the Pig-processed result.
TestHCatLoader.createTable(tblName2,"my_small_int smallint, my_tiny_int tinyint",null,driver,"textfile");
LOG.debug("File=" + INPUT_FILE_NAME);
TestHCatStorer.dumpFile(INPUT_FILE_NAME);
PigServer server=createPigServer(true);
try {
int queryNumber=1;
// Load from Hive, sum the two columns in Pig, store back to Hive.
logAndRegister(server,"A = load '" + tblName + "' using org.apache.hive.hcatalog.pig.HCatLoader() as (my_small_int:int, my_tiny_int:int);",queryNumber++);
logAndRegister(server,"b = foreach A generate my_small_int + my_tiny_int as my_small_int, my_tiny_int;",queryNumber++);
logAndRegister(server,"store b into '" + tblName2 + "' using org.apache.hive.hcatalog.pig.HCatStorer();",queryNumber);
TestHCatLoader.executeStatementOnDriver("select my_small_int from " + tblName2,driver);
ArrayList l=new ArrayList();
driver.getResults(l);
for ( Object t : l) {
LOG.debug("t=" + t);
}
// Exactly one row, holding 40 + 1 = 41.
Assert.assertEquals("Expected '1' rows; got '" + l.size() + "'",1,l.size());
int result=Integer.parseInt((String)l.get(0));
Assert.assertEquals("Expected value '41'; got '" + result + "'",41,result);
}
finally {
server.shutdown();
}
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Ensures Pig can read and write Hive smallint/tinyint columns. Both types
 * surface as Pig INTEGER on read across their full ranges; on write,
 * HCatStorer accepts in-range data but fails the job for any value one past
 * either bound (checked via smallTinyIntBoundsCheckHelper).
 */
@Test public void testSmallTinyInt() throws Exception {
  String readTblName = "test_small_tiny_int";
  File dataDir = new File(TEST_DATA_DIR + "/testSmallTinyIntData");
  File dataFile = new File(dataDir, "testSmallTinyInt.tsv");
  String writeTblName = "test_small_tiny_int_write";
  File writeDataFile = new File(TEST_DATA_DIR, writeTblName + ".tsv");
  FileUtil.fullyDelete(dataDir);
  Assert.assertTrue(dataDir.mkdir());
  // Extreme values of both types exercise the full representable range.
  HcatTestUtils.createTestDataFile(dataFile.getAbsolutePath(), new String[]{String.format("%d\t%d", Short.MIN_VALUE, Byte.MIN_VALUE), String.format("%d\t%d", Short.MAX_VALUE, Byte.MAX_VALUE)});
  Assert.assertEquals(0, driver.run("drop table if exists " + readTblName).getResponseCode());
  Assert.assertEquals(0, driver.run("create external table " + readTblName + " (my_small_int smallint, my_tiny_int tinyint)" + " row format delimited fields terminated by '\t' stored as textfile").getResponseCode());
  Assert.assertEquals(0, driver.run("load data local inpath '" + dataDir.getPath().replaceAll("\\\\", "/") + "' into table " + readTblName).getResponseCode());
  PigServer server = new PigServer(ExecType.LOCAL);
  server.registerQuery("data = load '" + readTblName + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
  Schema schema = server.dumpSchema("data");
  Assert.assertEquals(2, schema.getFields().size());
  // Both small integer types widen to Pig INTEGER.
  Assert.assertEquals("my_small_int", schema.getField(0).alias);
  Assert.assertEquals(DataType.INTEGER, schema.getField(0).type);
  Assert.assertEquals("my_tiny_int", schema.getField(1).alias);
  Assert.assertEquals(DataType.INTEGER, schema.getField(1).type);
  // Typed iterator (was raw); Integer.valueOf replaces deprecated new Integer.
  Iterator<Tuple> it = server.openIterator("data");
  Tuple t = it.next();
  Assert.assertEquals(Integer.valueOf(Short.MIN_VALUE), t.get(0));
  Assert.assertEquals(Integer.valueOf(Byte.MIN_VALUE), t.get(1));
  t = it.next();
  Assert.assertEquals(Integer.valueOf(Short.MAX_VALUE), t.get(0));
  Assert.assertEquals(Integer.valueOf(Byte.MAX_VALUE), t.get(1));
  Assert.assertFalse(it.hasNext());
  Assert.assertEquals(0, driver.run("drop table if exists " + writeTblName).getResponseCode());
  Assert.assertEquals(0, driver.run("create table " + writeTblName + " (my_small_int smallint, my_tiny_int tinyint) stored as rcfile").getResponseCode());
  // In-range data must store successfully...
  HcatTestUtils.createTestDataFile(writeDataFile.getAbsolutePath(), new String[]{String.format("%d\t%d", Short.MIN_VALUE, Byte.MIN_VALUE), String.format("%d\t%d", Short.MAX_VALUE, Byte.MAX_VALUE)});
  smallTinyIntBoundsCheckHelper(writeDataFile.getPath().replaceAll("\\\\", "/"), ExecJob.JOB_STATUS.COMPLETED);
  // ...while any value one past either bound must fail the store job.
  HcatTestUtils.createTestDataFile(TEST_DATA_DIR + "/shortTooSmall.tsv", new String[]{String.format("%d\t%d", Short.MIN_VALUE - 1, 0)});
  smallTinyIntBoundsCheckHelper(TEST_DATA_DIR + "/shortTooSmall.tsv", ExecJob.JOB_STATUS.FAILED);
  HcatTestUtils.createTestDataFile(TEST_DATA_DIR + "/shortTooBig.tsv", new String[]{String.format("%d\t%d", Short.MAX_VALUE + 1, 0)});
  smallTinyIntBoundsCheckHelper(TEST_DATA_DIR + "/shortTooBig.tsv", ExecJob.JOB_STATUS.FAILED);
  HcatTestUtils.createTestDataFile(TEST_DATA_DIR + "/byteTooSmall.tsv", new String[]{String.format("%d\t%d", 0, Byte.MIN_VALUE - 1)});
  smallTinyIntBoundsCheckHelper(TEST_DATA_DIR + "/byteTooSmall.tsv", ExecJob.JOB_STATUS.FAILED);
  HcatTestUtils.createTestDataFile(TEST_DATA_DIR + "/byteTooBig.tsv", new String[]{String.format("%d\t%d", 0, Byte.MAX_VALUE + 1)});
  smallTinyIntBoundsCheckHelper(TEST_DATA_DIR + "/byteTooBig.tsv", ExecJob.JOB_STATUS.FAILED);
}
Class: org.apache.hive.hcatalog.pig.TestHCatStorer APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier AssumptionSetter EqualityVerifier HybridVerifier
/**
 * Stores rows whose partition column value ('b=1', given as a static
 * partition spec to HCatStorer) also appears in the data file, then verifies
 * HCatLoader returns both the data and the partition column for every row.
 */
@Test public void testPartColsInData() throws IOException, CommandNeedRetryException {
  assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
  driver.run("drop table junit_unparted");
  String createTable = "create table junit_unparted(a int) partitioned by (b string) stored as " + storageFormat;
  int retCode = driver.run(createTable).getResponseCode();
  if (retCode != 0) {
    throw new IOException("Failed to create table.");
  }
  int LOOP_SIZE = 11;
  String[] input = new String[LOOP_SIZE];
  for (int i = 0; i < LOOP_SIZE; i++) {
    input[i] = i + "\t1";
  }
  HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
  PigServer server = new PigServer(ExecType.LOCAL);
  server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
  server.registerQuery("store A into 'default.junit_unparted' using " + HCatStorer.class.getName() + "('b=1');");
  server.registerQuery("B = load 'default.junit_unparted' using " + HCatLoader.class.getName() + "();");
  // Typed iterator (was raw): HCatLoader yields Pig Tuples.
  Iterator<Tuple> itr = server.openIterator("B");
  int i = 0;
  while (itr.hasNext()) {
    Tuple t = itr.next();
    assertEquals(2, t.size());
    assertEquals(t.get(0), i);
    assertEquals(t.get(1), "1");
    i++;
  }
  assertFalse(itr.hasNext());
  assertEquals(LOOP_SIZE, i);
}
APIUtilityVerifier BooleanVerifier AssumptionSetter EqualityVerifier HybridVerifier
/**
 * Stores Pig tuples/bags into Hive struct/array columns via HCatStorer and
 * verifies the JSON-rendered values through the Hive driver. The CREATE TABLE
 * DDL restores the complex-type arguments (struct&lt;...&gt;, array&lt;...&gt;) that
 * were stripped from this file; they are inferred from the Pig load schema
 * and the expected result rows below.
 */
@Test public void testBagNStruct() throws IOException, CommandNeedRetryException {
  assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
  driver.run("drop table junit_unparted");
  String createTable = "create table junit_unparted(b string,a struct<a1:int>, arr_of_struct array<string>, " + "arr_of_struct2 array<struct<s1:string,s2:string>>, arr_of_struct3 array<struct<s3:string>>) stored as " + storageFormat;
  int retCode = driver.run(createTable).getResponseCode();
  if (retCode != 0) {
    throw new IOException("Failed to create table.");
  }
  String[] inputData = new String[]{"zookeeper\t(2)\t{(pig)}\t{(pnuts,hdfs)}\t{(hadoop),(hcat)}", "chubby\t(2)\t{(sawzall)}\t{(bigtable,gfs)}\t{(mapreduce),(hcat)}"};
  HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, inputData);
  PigServer server = new PigServer(ExecType.LOCAL);
  server.setBatchOn();
  server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (b:chararray, a:tuple(a1:int), arr_of_struct:bag{mytup:tuple(s1:chararray)}, arr_of_struct2:bag{mytup:tuple(s1:chararray,s2:chararray)}, arr_of_struct3:bag{t3:tuple(s3:chararray)});");
  server.registerQuery("store A into 'default.junit_unparted' using " + HCatStorer.class.getName() + "('','b:chararray, a:tuple(a1:int)," + " arr_of_struct:bag{mytup:tuple(s1:chararray)}, arr_of_struct2:bag{mytup:tuple(s1:chararray,s2:chararray)}, arr_of_struct3:bag{t3:tuple(s3:chararray)}');");
  server.executeBatch();
  driver.run("select * from junit_unparted");
  ArrayList<String> res = new ArrayList<String>();
  driver.getResults(res);
  driver.run("drop table junit_unparted");
  Iterator<String> itr = res.iterator();
  // Hive renders structs as {"field":value} and arrays as [...] JSON.
  assertEquals("zookeeper\t{\"a1\":2}\t[\"pig\"]\t[{\"s1\":\"pnuts\",\"s2\":\"hdfs\"}]\t[{\"s3\":\"hadoop\"},{\"s3\":\"hcat\"}]", itr.next());
  assertEquals("chubby\t{\"a1\":2}\t[\"sawzall\"]\t[{\"s1\":\"bigtable\",\"s2\":\"gfs\"}]\t[{\"s3\":\"mapreduce\"},{\"s3\":\"hcat\"}]", itr.next());
  assertFalse(itr.hasNext());
}
IterativeVerifier AssumptionSetter EqualityVerifier HybridVerifier
/**
 * Create a data file with datatypes added in 0.13 (char(5), varchar(10),
 * decimal(5,2)). Read it with Pig and use Pig + HCatStorer to write to a Hive
 * table. Then read it back with both Pig and Hive and make sure results match
 * the raw input lines.
 */
@Test public void testDateCharTypes() throws Exception {
  assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
  final String tblName = "junit_date_char";
  TestHCatLoader.dropTable(tblName, driver);
  TestHCatLoader.createTable(tblName, "id int, char5 char(5), varchar10 varchar(10), dec52 decimal(5,2)", null, driver, storageFormat);
  int NUM_ROWS = 5;
  String[] rows = new String[NUM_ROWS];
  for (int i = 0; i < NUM_ROWS; i++) {
    rows[i] = i + "\txxxxx\tyyy\t" + 5.2;
  }
  HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, rows);
  LOG.debug("File=" + INPUT_FILE_NAME);
  PigServer server = createPigServer(true);
  int queryNumber = 1;
  logAndRegister(server, "A = load '" + INPUT_FILE_NAME + "' as (id:int, char5:chararray, varchar10:chararray, dec52:bigdecimal);", queryNumber++);
  logAndRegister(server, "store A into '" + tblName + "' using " + HCatStorer.class.getName() + "();", queryNumber++);
  logAndRegister(server, "B = load '" + tblName + "' using " + HCatLoader.class.getName() + "();", queryNumber);
  CommandProcessorResponse cpr = driver.run("select * from " + tblName);
  LOG.debug("cpr.respCode=" + cpr.getResponseCode() + " cpr.errMsg=" + cpr.getErrorMessage());
  List l = new ArrayList();
  driver.getResults(l);
  LOG.debug("Dumping rows via SQL from " + tblName);
  for (Object t : l) {
    LOG.debug(t == null ? null : t.toString());
  }
  // Typed iterator (was raw): HCatLoader yields Pig Tuples.
  Iterator<Tuple> itr = server.openIterator("B");
  int numRowsRead = 0;
  while (itr.hasNext()) {
    Tuple t = itr.next();
    // Re-serialize the tuple tab-separated to compare with the raw line.
    StringBuilder rowFromPig = new StringBuilder();
    for (int i = 0; i < t.size(); i++) {
      rowFromPig.append(t.get(i)).append("\t");
    }
    rowFromPig.setLength(rowFromPig.length() - 1);
    assertEquals("Comparing Pig to Raw data", rows[numRowsRead], rowFromPig.toString());
    numRowsRead++;
  }
  assertEquals("Expected " + NUM_ROWS + " rows; got " + numRowsRead + " file=" + INPUT_FILE_NAME, NUM_ROWS, numRowsRead);
}
APIUtilityVerifier IterativeVerifier BooleanVerifier AssumptionSetter EqualityVerifier HybridVerifier
/**
 * Splits one relation into two HCat-backed tables in a single Pig batch
 * (a &lt; 2 goes to junit_unparted, a &gt;= 2 to junit_unparted2), then verifies
 * the concatenation of both tables equals the original input rows.
 */
@Test public void testStoreMultiTables() throws IOException, CommandNeedRetryException {
  assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
  driver.run("drop table junit_unparted");
  String createTable = "create table junit_unparted(a int, b string) stored as " + storageFormat;
  if (driver.run(createTable).getResponseCode() != 0) {
    throw new IOException("Failed to create table.");
  }
  driver.run("drop table junit_unparted2");
  createTable = "create table junit_unparted2(a int, b string) stored as RCFILE";
  if (driver.run(createTable).getResponseCode() != 0) {
    throw new IOException("Failed to create table.");
  }
  // Generate the 3x3 grid "i\tj" for i,j in 1..3, in row-major order.
  final int LOOP_SIZE = 3;
  String[] rows = new String[LOOP_SIZE * LOOP_SIZE];
  for (int idx = 0; idx < rows.length; idx++) {
    rows[idx] = (idx / LOOP_SIZE + 1) + "\t" + (idx % LOOP_SIZE + 1);
  }
  HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, rows);
  PigServer server = new PigServer(ExecType.LOCAL);
  server.setBatchOn();
  server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
  server.registerQuery("B = filter A by a < 2;");
  server.registerQuery("store B into 'junit_unparted' using " + HCatStorer.class.getName() + "();");
  server.registerQuery("C = filter A by a >= 2;");
  server.registerQuery("store C into 'junit_unparted2' using " + HCatStorer.class.getName() + "();");
  server.executeBatch();
  // Concatenating both result sets must reproduce the full input in order.
  driver.run("select * from junit_unparted");
  ArrayList res = new ArrayList();
  driver.getResults(res);
  driver.run("select * from junit_unparted2");
  ArrayList res2 = new ArrayList();
  driver.getResults(res2);
  res.addAll(res2);
  driver.run("drop table junit_unparted");
  driver.run("drop table junit_unparted2");
  Iterator itr = res.iterator();
  for (String expected : rows) {
    assertEquals(expected, itr.next());
  }
  assertFalse(itr.hasNext());
}
APIUtilityVerifier IterativeVerifier BooleanVerifier AssumptionSetter EqualityVerifier HybridVerifier
/**
 * Basic HCatStorer smoke test with an explicit output schema: stores a 3x3
 * grid of (int, string) rows into junit_unparted and verifies the table
 * contents back through the Hive driver.
 */
@Test public void testStoreFuncSimple() throws IOException, CommandNeedRetryException {
  assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
  driver.run("drop table junit_unparted");
  String createTable = "create table junit_unparted(a int, b string) stored as " + storageFormat;
  if (driver.run(createTable).getResponseCode() != 0) {
    throw new IOException("Failed to create table.");
  }
  // Generate the 3x3 grid "i\tj" for i,j in 1..3, in row-major order.
  final int LOOP_SIZE = 3;
  String[] inputData = new String[LOOP_SIZE * LOOP_SIZE];
  for (int idx = 0; idx < inputData.length; idx++) {
    inputData[idx] = (idx / LOOP_SIZE + 1) + "\t" + (idx % LOOP_SIZE + 1);
  }
  HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, inputData);
  PigServer server = new PigServer(ExecType.LOCAL);
  server.setBatchOn();
  server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
  server.registerQuery("store A into 'default.junit_unparted' using " + HCatStorer.class.getName() + "('','a:int,b:chararray');");
  server.executeBatch();
  driver.run("select * from junit_unparted");
  ArrayList res = new ArrayList();
  driver.getResults(res);
  driver.run("drop table junit_unparted");
  // The table must echo back exactly the rows we wrote, in order.
  Iterator itr = res.iterator();
  for (String expected : inputData) {
    assertEquals(expected, itr.next());
  }
  assertFalse(itr.hasNext());
}
APIUtilityVerifier IterativeVerifier BooleanVerifier AssumptionSetter EqualityVerifier HybridVerifier
/**
 * Stores Pig data into an unpartitioned table through HCatStorer when only
 * the empty partition-spec argument is given and no explicit output schema,
 * then verifies the stored rows match the generated input line-for-line.
 */
@Test public void testStoreWithNoSchema() throws IOException, CommandNeedRetryException {
// Skip when this storage format is disabled for the current test run.
assumeTrue(!TestUtil.shouldSkip(storageFormat,DISABLED_STORAGE_FORMATS));
driver.run("drop table junit_unparted");
String createTable="create table junit_unparted(a int, b string) stored as " + storageFormat;
int retCode=driver.run(createTable).getResponseCode();
if (retCode != 0) {
throw new IOException("Failed to create table.");
}
// Build LOOP_SIZE^2 rows of the form "<i>\t<j>" for i,j in 1..LOOP_SIZE.
int LOOP_SIZE=3;
String[] input=new String[LOOP_SIZE * LOOP_SIZE];
int k=0;
for (int i=1; i <= LOOP_SIZE; i++) {
String si=i + "";
for (int j=1; j <= LOOP_SIZE; j++) {
input[k++]=si + "\t" + j;
}
}
HcatTestUtils.createTestDataFile(INPUT_FILE_NAME,input);
// Store via Pig in batch mode; HCatStorer receives only the empty
// partition-spec string and must take the schema from the Pig relation.
PigServer server=new PigServer(ExecType.LOCAL);
server.setBatchOn();
server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
server.registerQuery("store A into 'default.junit_unparted' using " + HCatStorer.class.getName() + "('');");
server.executeBatch();
// Read back through the Hive driver and compare against the input rows.
driver.run("select * from junit_unparted");
ArrayList res=new ArrayList();
driver.getResults(res);
driver.run("drop table junit_unparted");
Iterator itr=res.iterator();
for (int i=0; i < LOOP_SIZE * LOOP_SIZE; i++) {
assertEquals(input[i],itr.next());
}
assertFalse(itr.hasNext());
}
AssumptionSetter EqualityVerifier HybridVerifier
/**
 * Dynamic partitioning with no partition spec: the rows carry both partition
 * columns (emp_country, emp_state) in the data, HCatStorer is invoked with no
 * arguments, and all four rows must appear in the partitioned table.
 */
@Test public void testDynamicPartitioningMultiPartColsInDataNoSpec() throws IOException, CommandNeedRetryException {
assumeTrue(!TestUtil.shouldSkip(storageFormat,DISABLED_STORAGE_FORMATS));
driver.run("drop table if exists employee");
String createTable="CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " + " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + storageFormat;
int retCode=driver.run(createTable).getResponseCode();
if (retCode != 0) {
throw new IOException("Failed to create table.");
}
// Four rows, all emp_country=IN, each with a distinct emp_state value.
String[] inputData={"111237\tKrishna\t01/01/1990\tM\tIN\tTN","111238\tKalpana\t01/01/2000\tF\tIN\tKA","111239\tSatya\t01/01/2001\tM\tIN\tKL","111240\tKavya\t01/01/2002\tF\tIN\tAP"};
HcatTestUtils.createTestDataFile(INPUT_FILE_NAME,inputData);
PigServer pig=new PigServer(ExecType.LOCAL);
pig.setBatchOn();
pig.registerQuery("A = LOAD '" + INPUT_FILE_NAME + "' USING PigStorage() AS (emp_id:int,emp_name:chararray,emp_start_date:chararray,"+ "emp_gender:chararray,emp_country:chararray,emp_state:chararray);");
pig.registerQuery("IN = FILTER A BY emp_country == 'IN';");
// No partition spec: both partition values must be derived per row.
pig.registerQuery("STORE IN INTO 'employee' USING " + HCatStorer.class.getName() + "();");
pig.executeBatch();
driver.run("select * from employee");
ArrayList results=new ArrayList();
driver.getResults(results);
assertEquals(4,results.size());
// Sorted rows read back (including partition columns) must equal the
// original tab-separated input lines.
Collections.sort(results);
assertEquals(inputData[0],results.get(0));
assertEquals(inputData[1],results.get(1));
assertEquals(inputData[2],results.get(2));
assertEquals(inputData[3],results.get(3));
driver.run("drop table employee");
}
APIUtilityVerifier BooleanVerifier InternalCallVerifier AssumptionSetter EqualityVerifier HybridVerifier
/**
 * Negative tests for HCatStorer schema validation: storing a relation with an
 * unnamed column (generated by "a+10") and storing a relation with an
 * uppercase column name ("B") must both fail with a FrontendException
 * carrying PigHCatUtil.PIG_EXCEPTION_CODE and the expected message.
 */
@Test public void testNoAlias() throws IOException, CommandNeedRetryException {
assumeTrue(!TestUtil.shouldSkip(storageFormat,DISABLED_STORAGE_FORMATS));
driver.run("drop table junit_parted");
String createTable="create table junit_parted(a int, b string) partitioned by (ds string) stored as " + storageFormat;
int retCode=driver.run(createTable).getResponseCode();
if (retCode != 0) {
throw new IOException("Failed to create table.");
}
PigServer server=new PigServer(ExecType.LOCAL);
boolean errCaught=false;
try {
server.setBatchOn();
server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
// "a+10" yields a column without an alias, which HCatStorer must reject.
server.registerQuery("B = foreach A generate a+10, b;");
server.registerQuery("store B into 'junit_parted' using " + HCatStorer.class.getName() + "('ds=20100101');");
server.executeBatch();
}
catch ( PigException fe) {
// Unwrap to the root Pig exception and verify type, code and message.
PigException pe=LogUtils.getPigException(fe);
assertTrue(pe instanceof FrontendException);
assertEquals(PigHCatUtil.PIG_EXCEPTION_CODE,pe.getErrorCode());
assertTrue(pe.getMessage().contains("Column name for a field is not specified. Please provide the full schema as an argument to HCatStorer."));
errCaught=true;
}
assertTrue(errCaught);
errCaught=false;
try {
server.setBatchOn();
// Column "B" is uppercase; HCat requires all-lowercase column names.
server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, B:chararray);");
server.registerQuery("B = foreach A generate a, B;");
server.registerQuery("store B into 'junit_parted' using " + HCatStorer.class.getName() + "('ds=20100101');");
server.executeBatch();
}
catch ( PigException fe) {
PigException pe=LogUtils.getPigException(fe);
assertTrue(pe instanceof FrontendException);
assertEquals(PigHCatUtil.PIG_EXCEPTION_CODE,pe.getErrorCode());
assertTrue(pe.getMessage().contains("Column names should all be in lowercase. Invalid name found: B"));
errCaught=true;
}
driver.run("drop table junit_parted");
assertTrue(errCaught);
}
AssumptionSetter EqualityVerifier HybridVerifier
/**
 * Dynamic partitioning with a partial partition spec: emp_country is fixed to
 * 'IN' via the HCatStorer argument while emp_state is derived dynamically
 * from the data; all four rows must land in the table.
 */
@Test public void testDynamicPartitioningMultiPartColsInDataPartialSpec() throws IOException, CommandNeedRetryException {
assumeTrue(!TestUtil.shouldSkip(storageFormat,DISABLED_STORAGE_FORMATS));
driver.run("drop table if exists employee");
String createTable="CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " + " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + storageFormat;
int retCode=driver.run(createTable).getResponseCode();
if (retCode != 0) {
throw new IOException("Failed to create table.");
}
// Four rows, all emp_country=IN, each with a distinct emp_state value.
String[] inputData={"111237\tKrishna\t01/01/1990\tM\tIN\tTN","111238\tKalpana\t01/01/2000\tF\tIN\tKA","111239\tSatya\t01/01/2001\tM\tIN\tKL","111240\tKavya\t01/01/2002\tF\tIN\tAP"};
HcatTestUtils.createTestDataFile(INPUT_FILE_NAME,inputData);
PigServer pig=new PigServer(ExecType.LOCAL);
pig.setBatchOn();
pig.registerQuery("A = LOAD '" + INPUT_FILE_NAME + "' USING PigStorage() AS (emp_id:int,emp_name:chararray,emp_start_date:chararray,"+ "emp_gender:chararray,emp_country:chararray,emp_state:chararray);");
pig.registerQuery("IN = FILTER A BY emp_country == 'IN';");
// Partial spec: emp_country pinned, emp_state resolved dynamically.
pig.registerQuery("STORE IN INTO 'employee' USING " + HCatStorer.class.getName() + "('emp_country=IN');");
pig.executeBatch();
driver.run("select * from employee");
ArrayList results=new ArrayList();
driver.getResults(results);
assertEquals(4,results.size());
// Sorted rows read back (including partition columns) must equal the
// original tab-separated input lines.
Collections.sort(results);
assertEquals(inputData[0],results.get(0));
assertEquals(inputData[1],results.get(1));
assertEquals(inputData[2],results.get(2));
assertEquals(inputData[3],results.get(3));
driver.run("drop table employee");
}
APIUtilityVerifier IterativeVerifier BranchVerifier BooleanVerifier InternalCallVerifier AssumptionSetter EqualityVerifier NullVerifier HybridVerifier
/**
 * Exercises HCatStorer with all simple column types (int, float, double,
 * bigint, string, boolean, binary) and their null handling: the first row has
 * only column a, the second has everything except a, the third is fully
 * populated.  The table is verified twice: textually through the Hive driver
 * and as typed Tuples through HCatLoader.  Column g is never written and must
 * be NULL in every row.
 */
@Test public void testStoreFuncAllSimpleTypes() throws IOException, CommandNeedRetryException {
  assumeTrue(!TestUtil.shouldSkip(storageFormat,DISABLED_STORAGE_FORMATS));
  driver.run("drop table junit_unparted");
  String createTable="create table junit_unparted(a int, b float, c double, d bigint, e string, h boolean, f binary, g binary) stored as " + storageFormat;
  int retCode=driver.run(createTable).getResponseCode();
  if (retCode != 0) {
    throw new IOException("Failed to create table.");
  }
  int i=0;
  String[] input=new String[3];
  // Row with only the first column set; the rest are empty fields.
  input[i++]="0\t\t\t\t\t\t\t";
  // Row with the first column missing; note i has already advanced to 1.
  input[i++]="\t" + i * 2.1f + "\t" + i * 1.1d + "\t" + i * 2L + "\t" + "lets hcat" + "\t" + "true" + "\tbinary-data";
  // Fully populated row (i is 2 here).
  input[i++]=i + "\t" + i * 2.1f + "\t" + i * 1.1d + "\t" + i * 2L + "\t" + "lets hcat" + "\t" + "false" + "\tbinary-data";
  HcatTestUtils.createTestDataFile(INPUT_FILE_NAME,input);
  PigServer server=new PigServer(ExecType.LOCAL);
  server.setBatchOn();
  server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:float, c:double, d:long, e:chararray, h:boolean, f:bytearray);");
  server.registerQuery("store A into 'default.junit_unparted' using " + HCatStorer.class.getName() + "('','a:int, b:float, c:double, d:long, e:chararray, h:boolean, f:bytearray');");
  server.executeBatch();
  driver.run("select * from junit_unparted");
  // FIX: these declarations had their type parameters stripped, so the
  // assignments "String next=itr.next();" and "Tuple t=iter.next();" below
  // did not compile against the raw iterators.  Restore the generics.
  ArrayList<String> res=new ArrayList<String>();
  driver.getResults(res);
  Iterator<String> itr=res.iterator();
  String next=itr.next();
  assertEquals("0\tNULL\tNULL\tNULL\tNULL\tNULL\tNULL\tNULL",next);
  // Unset fields read back as NULL; column g (never written) is always NULL.
  assertEquals("NULL\t4.2\t2.2\t4\tlets hcat\ttrue\tbinary-data\tNULL",itr.next());
  assertEquals("3\t6.2999997\t3.3000000000000003\t6\tlets hcat\tfalse\tbinary-data\tNULL",itr.next());
  assertFalse(itr.hasNext());
  // Second pass: read through HCatLoader and check the typed values.
  server.registerQuery("B = load 'junit_unparted' using " + HCatLoader.class.getName() + ";");
  Iterator<Tuple> iter=server.openIterator("B");
  int count=0;
  int num5nulls=0;
  while (iter.hasNext()) {
    Tuple t=iter.next();
    if (t.get(6) == null) {
      // Only the first row left column f unset.
      num5nulls++;
    }
    else {
      // Binary columns surface as DataByteArray through the Pig loader.
      assertTrue(t.get(6) instanceof DataByteArray);
    }
    assertNull(t.get(7));
    count++;
  }
  assertEquals(3,count);
  assertEquals(1,num5nulls);
  driver.run("drop table junit_unparted");
}
AssumptionSetter EqualityVerifier HybridVerifier
/**
 * Static multi-partition store: splits the input into four relations by
 * emp_state and stores each through HCatStorer with a fully-specified
 * partition spec, then verifies all four rows are present in the table.
 */
@Test public void testMultiPartColsInData() throws IOException, CommandNeedRetryException {
  assumeTrue(!TestUtil.shouldSkip(storageFormat,DISABLED_STORAGE_FORMATS));
  // FIX: use "if exists" like the sibling employee-table tests so a missing
  // table does not produce a spurious error from the driver.
  driver.run("drop table if exists employee");
  String createTable="CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " + " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + storageFormat;
  int retCode=driver.run(createTable).getResponseCode();
  if (retCode != 0) {
    throw new IOException("Failed to create table.");
  }
  // Four rows, all emp_country=IN, one per target emp_state partition.
  String[] inputData={"111237\tKrishna\t01/01/1990\tM\tIN\tTN","111238\tKalpana\t01/01/2000\tF\tIN\tKA","111239\tSatya\t01/01/2001\tM\tIN\tKL","111240\tKavya\t01/01/2002\tF\tIN\tAP"};
  HcatTestUtils.createTestDataFile(INPUT_FILE_NAME,inputData);
  PigServer pig=new PigServer(ExecType.LOCAL);
  pig.setBatchOn();
  pig.registerQuery("A = LOAD '" + INPUT_FILE_NAME + "' USING PigStorage() AS (emp_id:int,emp_name:chararray,emp_start_date:chararray,"+ "emp_gender:chararray,emp_country:chararray,emp_state:chararray);");
  // One filtered relation and one fully-specified store per partition.
  pig.registerQuery("TN = FILTER A BY emp_state == 'TN';");
  pig.registerQuery("KA = FILTER A BY emp_state == 'KA';");
  pig.registerQuery("KL = FILTER A BY emp_state == 'KL';");
  pig.registerQuery("AP = FILTER A BY emp_state == 'AP';");
  pig.registerQuery("STORE TN INTO 'employee' USING " + HCatStorer.class.getName() + "('emp_country=IN,emp_state=TN');");
  pig.registerQuery("STORE KA INTO 'employee' USING " + HCatStorer.class.getName() + "('emp_country=IN,emp_state=KA');");
  pig.registerQuery("STORE KL INTO 'employee' USING " + HCatStorer.class.getName() + "('emp_country=IN,emp_state=KL');");
  pig.registerQuery("STORE AP INTO 'employee' USING " + HCatStorer.class.getName() + "('emp_country=IN,emp_state=AP');");
  pig.executeBatch();
  driver.run("select * from employee");
  ArrayList results=new ArrayList();
  driver.getResults(results);
  assertEquals(4,results.size());
  // Sorted rows read back (including partition columns) must equal the
  // original tab-separated input lines.
  Collections.sort(results);
  assertEquals(inputData[0],results.get(0));
  assertEquals(inputData[1],results.get(1));
  assertEquals(inputData[2],results.get(2));
  assertEquals(inputData[3],results.get(3));
  driver.run("drop table employee");
}
APIUtilityVerifier IterativeVerifier BooleanVerifier InternalCallVerifier AssumptionSetter EqualityVerifier HybridVerifier
/**
 * Stores eleven single-column rows into a static partition (b=1) and reads
 * the table back through HCatLoader, checking that each Tuple has the data
 * column plus the partition value appended.
 */
@Test public void testStoreInPartiitonedTbl() throws IOException, CommandNeedRetryException {
  assumeTrue(!TestUtil.shouldSkip(storageFormat,DISABLED_STORAGE_FORMATS));
  driver.run("drop table junit_unparted");
  String createTable="create table junit_unparted(a int) partitioned by (b string) stored as " + storageFormat;
  int retCode=driver.run(createTable).getResponseCode();
  if (retCode != 0) {
    throw new IOException("Failed to create table.");
  }
  int LOOP_SIZE=11;
  String[] input=new String[LOOP_SIZE];
  for (int i=0; i < LOOP_SIZE; i++) {
    input[i]=i + "";
  }
  HcatTestUtils.createTestDataFile(INPUT_FILE_NAME,input);
  PigServer server=new PigServer(ExecType.LOCAL);
  server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int);");
  server.registerQuery("store A into 'default.junit_unparted' using " + HCatStorer.class.getName() + "('b=1');");
  server.registerQuery("B = load 'default.junit_unparted' using " + HCatLoader.class.getName() + "();");
  // FIX: the iterator was raw, so "Tuple t=itr.next();" did not compile;
  // restore the stripped type parameter.
  Iterator<Tuple> itr=server.openIterator("B");
  int i=0;
  while (itr.hasNext()) {
    Tuple t=itr.next();
    // Each tuple is (a, b): the data column plus the partition value.
    assertEquals(2,t.size());
    // FIX: expected value goes first in assertEquals; the original had the
    // arguments swapped, which garbles the failure message.
    assertEquals(i,t.get(0));
    assertEquals("1",t.get(1));
    i++;
  }
  assertFalse(itr.hasNext());
  // FIX: compare against LOOP_SIZE instead of a duplicated literal 11.
  assertEquals(LOOP_SIZE,i);
  // FIX: drop the table afterwards, as every sibling test does, so state
  // does not leak into other tests.
  driver.run("drop table junit_unparted");
}
AssumptionSetter EqualityVerifier HybridVerifier
/**
 * Dynamic partitioning edge case: the input file is empty, so storing with no
 * partition spec must succeed and leave the table with zero rows.
 */
@Test public void testDynamicPartitioningMultiPartColsNoDataInDataNoSpec() throws IOException, CommandNeedRetryException {
assumeTrue(!TestUtil.shouldSkip(storageFormat,DISABLED_STORAGE_FORMATS));
driver.run("drop table if exists employee");
String createTable="CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " + " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + storageFormat;
int retCode=driver.run(createTable).getResponseCode();
if (retCode != 0) {
throw new IOException("Failed to create table.");
}
// Deliberately empty input: no rows, hence no partitions to create.
String[] inputData={};
HcatTestUtils.createTestDataFile(INPUT_FILE_NAME,inputData);
PigServer pig=new PigServer(ExecType.LOCAL);
pig.setBatchOn();
pig.registerQuery("A = LOAD '" + INPUT_FILE_NAME + "' USING PigStorage() AS (emp_id:int,emp_name:chararray,emp_start_date:chararray,"+ "emp_gender:chararray,emp_country:chararray,emp_state:chararray);");
pig.registerQuery("IN = FILTER A BY emp_country == 'IN';");
pig.registerQuery("STORE IN INTO 'employee' USING " + HCatStorer.class.getName() + "();");
pig.executeBatch();
driver.run("select * from employee");
ArrayList results=new ArrayList();
driver.getResults(results);
// The store must complete cleanly and write nothing.
assertEquals(0,results.size());
driver.run("drop table employee");
}
APIUtilityVerifier IterativeVerifier BooleanVerifier AssumptionSetter EqualityVerifier HybridVerifier
/**
 * Stores into an unpartitioned table using the no-argument HCatStorer
 * constructor and an unqualified table name, then verifies the stored rows
 * match the generated input line-for-line.
 */
@Test public void testStoreWithNoCtorArgs() throws IOException, CommandNeedRetryException {
assumeTrue(!TestUtil.shouldSkip(storageFormat,DISABLED_STORAGE_FORMATS));
driver.run("drop table junit_unparted");
String createTable="create table junit_unparted(a int, b string) stored as " + storageFormat;
int retCode=driver.run(createTable).getResponseCode();
if (retCode != 0) {
throw new IOException("Failed to create table.");
}
// Build LOOP_SIZE^2 rows of the form "<i>\t<j>" for i,j in 1..LOOP_SIZE.
int LOOP_SIZE=3;
String[] input=new String[LOOP_SIZE * LOOP_SIZE];
int k=0;
for (int i=1; i <= LOOP_SIZE; i++) {
String si=i + "";
for (int j=1; j <= LOOP_SIZE; j++) {
input[k++]=si + "\t" + j;
}
}
HcatTestUtils.createTestDataFile(INPUT_FILE_NAME,input);
PigServer server=new PigServer(ExecType.LOCAL);
server.setBatchOn();
server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
// No constructor arguments and no "default." prefix on the table name.
server.registerQuery("store A into 'junit_unparted' using " + HCatStorer.class.getName() + "();");
server.executeBatch();
// Read back through the Hive driver and compare against the input rows.
driver.run("select * from junit_unparted");
ArrayList res=new ArrayList();
driver.getResults(res);
driver.run("drop table junit_unparted");
Iterator itr=res.iterator();
for (int i=0; i < LOOP_SIZE * LOOP_SIZE; i++) {
assertEquals(input[i],itr.next());
}
assertFalse(itr.hasNext());
}
BooleanVerifier AssumptionSetter EqualityVerifier HybridVerifier
/**
 * Verifies that a partition is NOT published when the Pig job fails: the rows
 * are filtered through FailEvalFunc (which is expected to make the job fail),
 * after which "show partitions" must return nothing and the partition
 * directory must not exist, although the table directory itself does.
 */
@Test public void testPartitionPublish() throws IOException, CommandNeedRetryException {
assumeTrue(!TestUtil.shouldSkip(storageFormat,DISABLED_STORAGE_FORMATS));
driver.run("drop table ptn_fail");
String createTable="create table ptn_fail(a int, c string) partitioned by (b string) stored as " + storageFormat;
int retCode=driver.run(createTable).getResponseCode();
if (retCode != 0) {
throw new IOException("Failed to create table.");
}
int LOOP_SIZE=11;
String[] input=new String[LOOP_SIZE];
for (int i=0; i < LOOP_SIZE; i++) {
input[i]=i + "\tmath";
}
HcatTestUtils.createTestDataFile(INPUT_FILE_NAME,input);
PigServer server=new PigServer(ExecType.LOCAL);
server.setBatchOn();
server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, c:chararray);");
// FailEvalFunc is a project test UDF; presumably it fails the job so the
// store below cannot complete -- confirm against its definition.
server.registerQuery("B = filter A by " + FailEvalFunc.class.getName() + "($0);");
server.registerQuery("store B into 'ptn_fail' using " + HCatStorer.class.getName() + "('b=math');");
server.executeBatch();
String query="show partitions ptn_fail";
retCode=driver.run(query).getResponseCode();
if (retCode != 0) {
throw new IOException("Error " + retCode + " running query "+ query);
}
ArrayList res=new ArrayList();
driver.getResults(res);
// No partition may have been published by the failed job.
assertEquals(0,res.size());
// The table directory exists, but the partition directory must not.
assertTrue((new File(TEST_WAREHOUSE_DIR + "/ptn_fail")).exists());
assertFalse((new File(TEST_WAREHOUSE_DIR + "/ptn_fail/b=math")).exists());
}
Class: org.apache.hive.hcatalog.pig.TestHCatStorerMulti AssumptionSetter EqualityVerifier HybridVerifier
/**
 * Splits the basic input across two partitions in a single Pig batch
 * (a &lt; 2 into bkt=0, a &gt;= 2 into bkt=1) and checks the partitioned table
 * ends up holding every input row.
 */
@Test public void testStorePartitionedTable() throws Exception {
  assumeTrue(!TestUtil.shouldSkip(storageFormat,DISABLED_STORAGE_FORMATS));
  createTable(PARTITIONED_TABLE,"a int, b string","bkt string");
  populateBasicFile();
  PigServer pigServer=new PigServer(ExecType.LOCAL);
  pigServer.setBatchOn();
  pigServer.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
  pigServer.registerQuery("B2 = filter A by a < 2;");
  pigServer.registerQuery("store B2 into '" + PARTITIONED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer('bkt=0');");
  pigServer.registerQuery("C2 = filter A by a >= 2;");
  pigServer.registerQuery("store C2 into '" + PARTITIONED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer('bkt=1');");
  pigServer.executeBatch();
  // Every input row must be readable from one of the two partitions.
  driver.run("select * from " + PARTITIONED_TABLE);
  ArrayList rowsFromHive=new ArrayList();
  driver.getResults(rowsFromHive);
  assertEquals(basicInputData.size(),rowsFromHive.size());
}
AssumptionSetter EqualityVerifier HybridVerifier
/**
 * Stores the basic input file into an unpartitioned table via HCatStorer and
 * checks that the Hive driver reads back the same number of rows.
 */
@Test public void testStoreBasicTable() throws Exception {
  assumeTrue(!TestUtil.shouldSkip(storageFormat,DISABLED_STORAGE_FORMATS));
  createTable(BASIC_TABLE,"a int, b string");
  populateBasicFile();
  PigServer pigServer=new PigServer(ExecType.LOCAL);
  pigServer.setBatchOn();
  pigServer.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
  pigServer.registerQuery("store A into '" + BASIC_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer();");
  pigServer.executeBatch();
  driver.run("select * from " + BASIC_TABLE);
  ArrayList rowsFromHive=new ArrayList();
  driver.getResults(rowsFromHive);
  assertEquals(basicInputData.size(),rowsFromHive.size());
}
AssumptionSetter EqualityVerifier HybridVerifier
/**
 * Multi-store in one Pig batch: the full input goes into the basic table
 * while two filtered relations go into separate partitions (bkt=0, bkt=1) of
 * the partitioned table.  Both tables must end up with all input rows.
 */
@Test public void testStoreTableMulti() throws Exception {
  assumeTrue(!TestUtil.shouldSkip(storageFormat,DISABLED_STORAGE_FORMATS));
  createTable(BASIC_TABLE,"a int, b string");
  createTable(PARTITIONED_TABLE,"a int, b string","bkt string");
  populateBasicFile();
  PigServer pigServer=new PigServer(ExecType.LOCAL);
  pigServer.setBatchOn();
  pigServer.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
  pigServer.registerQuery("store A into '" + BASIC_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer();");
  pigServer.registerQuery("B2 = filter A by a < 2;");
  pigServer.registerQuery("store B2 into '" + PARTITIONED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer('bkt=0');");
  pigServer.registerQuery("C2 = filter A by a >= 2;");
  pigServer.registerQuery("store C2 into '" + PARTITIONED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer('bkt=1');");
  pigServer.executeBatch();
  // Read both tables back and compare row counts against the input.
  driver.run("select * from " + BASIC_TABLE);
  ArrayList basicRows=new ArrayList();
  driver.getResults(basicRows);
  driver.run("select * from " + PARTITIONED_TABLE);
  ArrayList partitionedRows=new ArrayList();
  driver.getResults(partitionedRows);
  assertEquals(basicInputData.size(),basicRows.size());
  assertEquals(basicInputData.size(),partitionedRows.size());
}
Class: org.apache.hive.hcatalog.pig.TestHCatStorerWrapper APIUtilityVerifier IterativeVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Stores into an external partitioned table through HCatStorerWrapper with an
 * explicit external directory: verifies that the map output files appear in
 * that directory and that the rows (with the partition value appended) read
 * back correctly through the Hive driver.
 */
@Test public void testStoreExternalTableWithExternalDir() throws IOException, CommandNeedRetryException {
// Temporary directory that will receive the partition's data files.
File tmpExternalDir=new File(TEST_DATA_DIR,UUID.randomUUID().toString());
tmpExternalDir.deleteOnExit();
String part_val="100";
driver.run("drop table junit_external");
String createTable="create external table junit_external(a int, b string) partitioned by (c string) stored as RCFILE";
Assert.assertEquals(0,driver.run(createTable).getResponseCode());
// Build LOOP_SIZE^2 rows of the form "<i>\t<j>" for i,j in 1..LOOP_SIZE.
int LOOP_SIZE=3;
String[] inputData=new String[LOOP_SIZE * LOOP_SIZE];
int k=0;
for (int i=1; i <= LOOP_SIZE; i++) {
String si=i + "";
for (int j=1; j <= LOOP_SIZE; j++) {
inputData[k++]=si + "\t" + j;
}
}
HcatTestUtils.createTestDataFile(INPUT_FILE_NAME,inputData);
PigServer server=new PigServer(ExecType.LOCAL);
server.setBatchOn();
logAndRegister(server,"A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
// The wrapper takes the partition spec plus the external directory path;
// backslashes are normalized for Windows paths.
logAndRegister(server,"store A into 'default.junit_external' using " + HCatStorerWrapper.class.getName() + "('c="+ part_val+ "','"+ tmpExternalDir.getPath().replaceAll("\\\\","/")+ "');");
server.executeBatch();
// The data must have been written into the supplied external directory.
Assert.assertTrue(tmpExternalDir.exists());
boolean found=false;
File[] f=tmpExternalDir.listFiles();
if (f != null) {
for ( File fin : f) {
// Look for a map output file ("part-m-00000") in the external dir.
if (fin.getPath().contains("part-m-00000")) {
found=true;
}
}
}
Assert.assertTrue(found);
// Read back via Hive: each row gains the partition value as a last column.
driver.run("select * from junit_external");
ArrayList res=new ArrayList();
driver.getResults(res);
driver.run("drop table junit_external");
Iterator itr=res.iterator();
for (int i=1; i <= LOOP_SIZE; i++) {
String si=i + "";
for (int j=1; j <= LOOP_SIZE; j++) {
Assert.assertEquals(si + "\t" + j+ "\t"+ part_val,itr.next());
}
}
Assert.assertFalse(itr.hasNext());
}
Class: org.apache.hive.hcatalog.pig.TestPigHCatUtil APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * With no name overrides configured, getBagSubSchema must wrap the bag's
 * element in a tuple named "innertuple" holding a single chararray field
 * named "innerfield".
 */
@Test public void testGetBagSubSchema() throws Exception {
  // Expected shape: bag -> tuple("innertuple") -> chararray("innerfield").
  ResourceFieldSchema[] innerFields=new ResourceFieldSchema[1];
  innerFields[0]=new ResourceFieldSchema().setName("innerfield").setType(DataType.CHARARRAY);
  ResourceFieldSchema tupleField=new ResourceFieldSchema().setName("innertuple").setDescription("The tuple in the bag").setType(DataType.TUPLE);
  tupleField.setSchema(new ResourceSchema().setFields(innerFields));
  ResourceSchema expected=new ResourceSchema().setFields(new ResourceFieldSchema[]{tupleField});
  // Actual: convert an HCat ARRAY-of-STRING field through PigHCatUtil.
  HCatSchema elementSchema=new HCatSchema(Lists.newArrayList(new HCatFieldSchema("innerLlama",HCatFieldSchema.Type.STRING,null)));
  HCatFieldSchema bagField=new HCatFieldSchema("llama",HCatFieldSchema.Type.ARRAY,elementSchema,null);
  ResourceSchema actual=PigHCatUtil.getBagSubSchema(bagField);
  // Compare the rendered schemas rather than object identity.
  Assert.assertEquals(expected.toString(),actual.toString());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * With HCAT_PIG_INNER_TUPLE_NAME / HCAT_PIG_INNER_FIELD_NAME system
 * properties set, getBagSubSchema must use the configured names, with
 * "FIELDNAME" in the field-name template replaced by the bag's own name.
 */
@Test public void testGetBagSubSchemaConfigured() throws Exception {
  System.setProperty(HCatConstants.HCAT_PIG_INNER_TUPLE_NAME,"t");
  System.setProperty(HCatConstants.HCAT_PIG_INNER_FIELD_NAME,"FIELDNAME_tuple");
  // FIX: clear the overrides in a finally block so an assertion failure
  // cannot leak these system properties into subsequent tests.
  try {
    UDFContext.getUDFContext().setClientSystemProps(System.getProperties());
    // Expected shape: tuple named "t" with one chararray field whose name is
    // the template applied to the bag name "llama" -> "llama_tuple".
    ResourceFieldSchema[] bagSubFieldSchemas=new ResourceFieldSchema[1];
    bagSubFieldSchemas[0]=new ResourceFieldSchema().setName("t").setDescription("The tuple in the bag").setType(DataType.TUPLE);
    ResourceFieldSchema[] innerTupleFieldSchemas=new ResourceFieldSchema[1];
    innerTupleFieldSchemas[0]=new ResourceFieldSchema().setName("llama_tuple").setType(DataType.CHARARRAY);
    bagSubFieldSchemas[0].setSchema(new ResourceSchema().setFields(innerTupleFieldSchemas));
    ResourceSchema expected=new ResourceSchema().setFields(bagSubFieldSchemas);
    HCatSchema actualHCatSchema=new HCatSchema(Lists.newArrayList(new HCatFieldSchema("innerLlama",HCatFieldSchema.Type.STRING,null)));
    HCatFieldSchema actualHCatFieldSchema=new HCatFieldSchema("llama",HCatFieldSchema.Type.ARRAY,actualHCatSchema,null);
    ResourceSchema actual=PigHCatUtil.getBagSubSchema(actualHCatFieldSchema);
    Assert.assertEquals(expected.toString(),actual.toString());
  }
  finally {
    System.clearProperty(HCatConstants.HCAT_PIG_INNER_TUPLE_NAME);
    System.clearProperty(HCatConstants.HCAT_PIG_INNER_FIELD_NAME);
  }
}
Class: org.apache.hive.hcatalog.streaming.TestStreaming InternalCallVerifier EqualityVerifier
/**
 * Streaming transaction lifecycle: an aborted transaction must leave nothing
 * on disk and report state ABORTED, after which the next transaction in the
 * same batch can commit successfully and its rows become visible.
 */
@Test public void testTransactionBatchAbortAndCommit() throws Exception {
HiveEndPoint endPt=new HiveEndPoint(metaStoreURI,dbName,tblName,partitionVals);
DelimitedInputWriter writer=new DelimitedInputWriter(fieldNames,",",endPt);
StreamingConnection connection=endPt.newConnection(false);
TransactionBatch txnBatch=connection.fetchTransactionBatch(10,writer);
// First transaction: write two rows, then abort instead of committing.
txnBatch.beginNextTransaction();
txnBatch.write("1,Hello streaming".getBytes());
txnBatch.write("2,Welcome to streaming".getBytes());
txnBatch.abort();
// The abort must leave no data behind and flip the state to ABORTED.
checkNothingWritten(partLoc);
Assert.assertEquals(TransactionBatch.TxnState.ABORTED,txnBatch.getCurrentTransactionState());
// Second transaction in the same batch: same rows, committed this time.
txnBatch.beginNextTransaction();
txnBatch.write("1,Hello streaming".getBytes());
txnBatch.write("2,Welcome to streaming".getBytes());
txnBatch.commit();
checkDataWritten(partLoc,1,10,1,1,"{1, Hello streaming}","{2, Welcome to streaming}");
txnBatch.close();
connection.close();
}
InternalCallVerifier EqualityVerifier
/**
 * Two transaction batches from the same connection writing to the same
 * partition, with interleaved writes and commits: uncommitted writes must be
 * invisible, each commit makes only that batch's rows visible, and both
 * batches end in state COMMITTED.
 */
@Test public void testInterleavedTransactionBatchCommits() throws Exception {
HiveEndPoint endPt=new HiveEndPoint(metaStoreURI,dbName,tblName,partitionVals);
DelimitedInputWriter writer=new DelimitedInputWriter(fieldNames,",",endPt);
StreamingConnection connection=endPt.newConnection(false);
// Open two concurrent transaction batches against the same endpoint.
TransactionBatch txnBatch1=connection.fetchTransactionBatch(10,writer);
txnBatch1.beginNextTransaction();
DelimitedInputWriter writer2=new DelimitedInputWriter(fieldNames,",",endPt);
TransactionBatch txnBatch2=connection.fetchTransactionBatch(10,writer2);
txnBatch2.beginNextTransaction();
// One uncommitted write in each batch: nothing may be visible yet.
txnBatch1.write("1,Hello streaming".getBytes());
txnBatch2.write("3,Hello streaming - once again".getBytes());
checkNothingWritten(partLoc);
// Commit the second batch first; only its row becomes visible.
txnBatch2.commit();
checkDataWritten(partLoc,11,20,1,1,"{3, Hello streaming - once again}");
// Now commit the first batch; both rows are visible.
txnBatch1.commit();
checkDataWritten(partLoc,1,20,1,2,"{1, Hello streaming}","{3, Hello streaming - once again}");
// Second round of interleaved transactions on both batches.
txnBatch1.beginNextTransaction();
txnBatch1.write("2,Welcome to streaming".getBytes());
txnBatch2.beginNextTransaction();
txnBatch2.write("4,Welcome to streaming - once again".getBytes());
// Still only the previously committed rows are visible.
checkDataWritten(partLoc,1,20,1,2,"{1, Hello streaming}","{3, Hello streaming - once again}");
txnBatch1.commit();
checkDataWritten(partLoc,1,20,1,2,"{1, Hello streaming}","{2, Welcome to streaming}","{3, Hello streaming - once again}");
txnBatch2.commit();
checkDataWritten(partLoc,1,20,1,2,"{1, Hello streaming}","{2, Welcome to streaming}","{3, Hello streaming - once again}","{4, Welcome to streaming - once again}");
Assert.assertEquals(TransactionBatch.TxnState.COMMITTED,txnBatch1.getCurrentTransactionState());
Assert.assertEquals(TransactionBatch.TxnState.COMMITTED,txnBatch2.getCurrentTransactionState());
txnBatch1.close();
txnBatch2.close();
connection.close();
}
APIUtilityVerifier EqualityVerifier
@Test public void testFileDumpCorruptSideFiles() throws Exception {
dropDB(msClient,dbName3);
String dbLocation=dbFolder.newFolder(dbName3).getCanonicalPath() + ".db";
dbLocation=dbLocation.replaceAll("\\\\","/");
String[] colNames="key1,key2,data".split(",");
String[] colTypes="string,int,string".split(",");
String[] bucketNames="key1,key2".split(",");
int bucketCount=4;
createDbAndTable(driver,dbName3,tblName3,null,colNames,colTypes,bucketNames,null,dbLocation,bucketCount);
HiveEndPoint endPt=new HiveEndPoint(metaStoreURI,dbName3,tblName3,null);
DelimitedInputWriter writer=new DelimitedInputWriter(colNames,",",endPt);
StreamingConnection connection=endPt.newConnection(false);
TransactionBatch txnBatch=connection.fetchTransactionBatch(2,writer);
txnBatch.beginNextTransaction();
txnBatch.write("name0,1,Hello streaming".getBytes());
txnBatch.write("name2,2,Welcome to streaming".getBytes());
txnBatch.write("name4,2,more Streaming unlimited".getBytes());
txnBatch.write("name5,2,even more Streaming unlimited".getBytes());
txnBatch.write("name6,3,aHello streaming".getBytes());
txnBatch.commit();
Map> offsetMap=new HashMap>();
recordOffsets(conf,dbLocation,offsetMap);
txnBatch.beginNextTransaction();
txnBatch.write("name01,11,-Hello streaming".getBytes());
txnBatch.write("name21,21,-Welcome to streaming".getBytes());
txnBatch.write("name41,21,-more Streaming unlimited".getBytes());
txnBatch.write("name51,21,-even more Streaming unlimited".getBytes());
txnBatch.write("name02,12,--Hello streaming".getBytes());
txnBatch.write("name22,22,--Welcome to streaming".getBytes());
txnBatch.write("name42,22,--more Streaming unlimited".getBytes());
txnBatch.write("name52,22,--even more Streaming unlimited".getBytes());
txnBatch.write("name7,4,aWelcome to streaming".getBytes());
txnBatch.write("name8,5,amore Streaming unlimited".getBytes());
txnBatch.write("name9,6,aeven more Streaming unlimited".getBytes());
txnBatch.write("name10,7,bHello streaming".getBytes());
txnBatch.write("name11,8,bWelcome to streaming".getBytes());
txnBatch.write("name12,9,bmore Streaming unlimited".getBytes());
txnBatch.write("name13,10,beven more Streaming unlimited".getBytes());
txnBatch.commit();
recordOffsets(conf,dbLocation,offsetMap);
Path path=new Path(dbLocation);
Collection files=FileDump.getAllFilesInPath(path,conf);
for ( String file : files) {
if (file.contains("bucket_00000")) {
corruptSideFile(file,conf,offsetMap,"bucket_00000",-1);
}
else if (file.contains("bucket_00001")) {
corruptSideFile(file,conf,offsetMap,"bucket_00001",0);
}
else if (file.contains("bucket_00002")) {
corruptSideFile(file,conf,offsetMap,"bucket_00002",3);
}
else if (file.contains("bucket_00003")) {
corruptSideFile(file,conf,offsetMap,"bucket_00003",10);
}
}
PrintStream origErr=System.err;
ByteArrayOutputStream myErr=new ByteArrayOutputStream();
System.setErr(new PrintStream(myErr));
FileDump.main(new String[]{dbLocation});
System.err.flush();
System.setErr(origErr);
String errDump=new String(myErr.toByteArray());
Assert.assertEquals(true,errDump.contains("bucket_00000_flush_length [length: 11"));
Assert.assertEquals(true,errDump.contains("bucket_00001_flush_length [length: 0"));
Assert.assertEquals(true,errDump.contains("bucket_00002_flush_length [length: 24"));
Assert.assertEquals(true,errDump.contains("bucket_00003_flush_length [length: 80"));
Assert.assertEquals(false,errDump.contains("Exception"));
Assert.assertEquals(true,errDump.contains("4 file(s) are corrupted"));
Assert.assertEquals(false,errDump.contains("is still open for writes."));
origErr=System.err;
myErr=new ByteArrayOutputStream();
System.setErr(new PrintStream(myErr));
FileDump.main(new String[]{dbLocation,"--recover","--skip-dump"});
System.err.flush();
System.setErr(origErr);
errDump=new String(myErr.toByteArray());
Assert.assertEquals(true,errDump.contains("bucket_00000 recovered successfully!"));
Assert.assertEquals(true,errDump.contains("bucket_00001 recovered successfully!"));
Assert.assertEquals(true,errDump.contains("bucket_00002 recovered successfully!"));
Assert.assertEquals(true,errDump.contains("bucket_00003 recovered successfully!"));
List offsets=offsetMap.get("bucket_00000");
Assert.assertEquals(true,errDump.contains("Readable footerOffsets: " + offsets.toString()));
offsets=offsetMap.get("bucket_00001");
Assert.assertEquals(true,errDump.contains("Readable footerOffsets: " + offsets.toString()));
offsets=offsetMap.get("bucket_00002");
Assert.assertEquals(true,errDump.contains("Readable footerOffsets: " + offsets.toString()));
offsets=offsetMap.get("bucket_00003");
Assert.assertEquals(true,errDump.contains("Readable footerOffsets: " + offsets.toString()));
Assert.assertEquals(false,errDump.contains("Exception"));
Assert.assertEquals(false,errDump.contains("is still open for writes."));
origErr=System.err;
myErr=new ByteArrayOutputStream();
System.setErr(new PrintStream(myErr));
FileDump.main(new String[]{dbLocation});
System.err.flush();
System.setErr(origErr);
errDump=new String(myErr.toByteArray());
Assert.assertEquals(false,errDump.contains("Exception"));
Assert.assertEquals(false,errDump.contains("file(s) are corrupted"));
Assert.assertEquals(false,errDump.contains("is still open for writes."));
files=FileDump.getAllFilesInPath(path,conf);
for ( String file : files) {
Assert.assertEquals(false,file.contains("_flush_length"));
}
txnBatch.close();
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Streams bucketed rows into two independent tables and verifies the
 * per-bucket record distribution of the first table.
 * Fixes: JUnit expects assertEquals(message, expected, actual) — the
 * original passed actual first; also closes batches/connections (leak).
 */
@Test public void testBucketing() throws Exception {
dropDB(msClient,dbName3);
dropDB(msClient,dbName4);
// Normalize Windows path separators for the metastore location string.
String dbLocation=dbFolder.newFolder(dbName3).getCanonicalPath() + ".db";
dbLocation=dbLocation.replaceAll("\\\\","/");
String[] colNames="key1,key2,data".split(",");
String[] colTypes="string,int,string".split(",");
String[] bucketNames="key1,key2".split(",");
int bucketCount=4;
createDbAndTable(driver,dbName3,tblName3,null,colNames,colTypes,bucketNames,null,dbLocation,bucketCount);
String dbLocation2=dbFolder.newFolder(dbName4).getCanonicalPath() + ".db";
dbLocation2=dbLocation2.replaceAll("\\\\","/");
String[] colNames2="key3,key4,data2".split(",");
String[] colTypes2="string,int,string".split(",");
String[] bucketNames2="key3,key4".split(",");
createDbAndTable(driver,dbName4,tblName4,null,colNames2,colTypes2,bucketNames2,null,dbLocation2,bucketCount);
HiveEndPoint endPt=new HiveEndPoint(metaStoreURI,dbName3,tblName3,null);
DelimitedInputWriter writer=new DelimitedInputWriter(colNames,",",endPt);
StreamingConnection connection=endPt.newConnection(false);
TransactionBatch txnBatch=connection.fetchTransactionBatch(2,writer);
txnBatch.beginNextTransaction();
txnBatch.write("name0,1,Hello streaming".getBytes());
txnBatch.write("name2,2,Welcome to streaming".getBytes());
txnBatch.write("name4,2,more Streaming unlimited".getBytes());
txnBatch.write("name5,2,even more Streaming unlimited".getBytes());
txnBatch.commit();
// Close before reading the bucket files so all writers are flushed.
txnBatch.close();
connection.close();
HiveEndPoint endPt2=new HiveEndPoint(metaStoreURI,dbName4,tblName4,null);
DelimitedInputWriter writer2=new DelimitedInputWriter(colNames2,",",endPt2);
StreamingConnection connection2=endPt2.newConnection(false);
TransactionBatch txnBatch2=connection2.fetchTransactionBatch(2,writer2);
txnBatch2.beginNextTransaction();
txnBatch2.write("name5,2,fact3".getBytes());
txnBatch2.write("name8,2,fact3".getBytes());
txnBatch2.write("name0,1,fact1".getBytes());
txnBatch2.commit();
txnBatch2.close();
connection2.close();
// NOTE(review): element type was lost in extraction; the bounded wildcard
// keeps the size() asserts compiling for any List value type — confirm
// against the dumpAllBuckets signature.
HashMap<Integer, ? extends List<?>> actual1=dumpAllBuckets(dbLocation,tblName3);
HashMap<Integer, ? extends List<?>> actual2=dumpAllBuckets(dbLocation2,tblName4);
System.err.println("\n Table 1");
System.err.println(actual1);
System.err.println("\n Table 2");
System.err.println(actual2);
// JUnit argument order: expected value first, then actual.
Assert.assertEquals("number of buckets does not match expectation",4,actual1.values().size());
Assert.assertEquals("records in bucket does not match expectation",2,actual1.get(0).size());
Assert.assertEquals("records in bucket does not match expectation",1,actual1.get(1).size());
Assert.assertEquals("records in bucket does not match expectation",0,actual1.get(2).size());
Assert.assertEquals("records in bucket does not match expectation",1,actual1.get(3).size());
}
InternalCallVerifier EqualityVerifier
@Test public void testMultipleTransactionBatchCommits() throws Exception {
HiveEndPoint endPt=new HiveEndPoint(metaStoreURI,dbName,tblName,partitionVals);
DelimitedInputWriter writer=new DelimitedInputWriter(fieldNames,",",endPt);
StreamingConnection connection=endPt.newConnection(true);
TransactionBatch txnBatch=connection.fetchTransactionBatch(10,writer);
txnBatch.beginNextTransaction();
txnBatch.write("1,Hello streaming".getBytes());
txnBatch.commit();
checkDataWritten(partLoc,1,10,1,1,"{1, Hello streaming}");
txnBatch.beginNextTransaction();
txnBatch.write("2,Welcome to streaming".getBytes());
txnBatch.commit();
checkDataWritten(partLoc,1,10,1,1,"{1, Hello streaming}","{2, Welcome to streaming}");
txnBatch.close();
txnBatch=connection.fetchTransactionBatch(10,writer);
txnBatch.beginNextTransaction();
txnBatch.write("3,Hello streaming - once again".getBytes());
txnBatch.commit();
checkDataWritten(partLoc,1,20,1,2,"{1, Hello streaming}","{2, Welcome to streaming}","{3, Hello streaming - once again}");
txnBatch.beginNextTransaction();
txnBatch.write("4,Welcome to streaming - once again".getBytes());
txnBatch.commit();
checkDataWritten(partLoc,1,20,1,2,"{1, Hello streaming}","{2, Welcome to streaming}","{3, Hello streaming - once again}","{4, Welcome to streaming - once again}");
Assert.assertEquals(TransactionBatch.TxnState.COMMITTED,txnBatch.getCurrentTransactionState());
txnBatch.close();
connection.close();
}
IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Drains a 10-txn batch twice: first committing every transaction, then
 * (in a fresh batch) aborting every transaction. After each begin the
 * remaining count must decrement; after close the state must be INACTIVE.
 */
@Test public void testRemainingTransactions() throws Exception {
HiveEndPoint endPt=new HiveEndPoint(metaStoreURI,dbName,tblName,partitionVals);
DelimitedInputWriter writer=new DelimitedInputWriter(fieldNames,",",endPt);
StreamingConnection connection=endPt.newConnection(true);
TransactionBatch txnBatch=connection.fetchTransactionBatch(10,writer);
int batch=0;
int initialCount=txnBatch.remainingTransactions();
while (txnBatch.remainingTransactions() > 0) {
txnBatch.beginNextTransaction();
// Each begin consumes exactly one transaction from the batch.
Assert.assertEquals(--initialCount,txnBatch.remainingTransactions());
for (int rec=0; rec < 2; ++rec) {
Assert.assertEquals(TransactionBatch.TxnState.OPEN,txnBatch.getCurrentTransactionState());
txnBatch.write((batch * rec + ",Hello streaming").getBytes());
}
txnBatch.commit();
Assert.assertEquals(TransactionBatch.TxnState.COMMITTED,txnBatch.getCurrentTransactionState());
++batch;
}
Assert.assertEquals(0,txnBatch.remainingTransactions());
txnBatch.close();
Assert.assertEquals(TransactionBatch.TxnState.INACTIVE,txnBatch.getCurrentTransactionState());
// Second pass: identical loop but aborting instead of committing.
txnBatch=connection.fetchTransactionBatch(10,writer);
batch=0;
initialCount=txnBatch.remainingTransactions();
while (txnBatch.remainingTransactions() > 0) {
txnBatch.beginNextTransaction();
Assert.assertEquals(--initialCount,txnBatch.remainingTransactions());
for (int rec=0; rec < 2; ++rec) {
Assert.assertEquals(TransactionBatch.TxnState.OPEN,txnBatch.getCurrentTransactionState());
txnBatch.write((batch * rec + ",Hello streaming").getBytes());
}
txnBatch.abort();
Assert.assertEquals(TransactionBatch.TxnState.ABORTED,txnBatch.getCurrentTransactionState());
++batch;
}
Assert.assertEquals(0,txnBatch.remainingTransactions());
txnBatch.close();
Assert.assertEquals(TransactionBatch.TxnState.INACTIVE,txnBatch.getCurrentTransactionState());
connection.close();
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies that TransactionBatch.heartbeat() keeps the existing metastore
 * lock alive: still exactly one lock, with an unchanged acquisition time.
 * Fixes: "nubmer" typo in the assert message; batch/connection were never
 * closed, leaking the open transaction across tests.
 */
@Test public void testHeartbeat() throws Exception {
HiveEndPoint endPt=new HiveEndPoint(metaStoreURI,dbName2,tblName2,null);
DelimitedInputWriter writer=new DelimitedInputWriter(fieldNames2,",",endPt);
StreamingConnection connection=endPt.newConnection(false,null);
TransactionBatch txnBatch=connection.fetchTransactionBatch(5,writer);
txnBatch.beginNextTransaction();
ShowLocksResponse response=msClient.showLocks();
Assert.assertEquals("Wrong number of locks: " + response,1,response.getLocks().size());
ShowLocksResponseElement lock=response.getLocks().get(0);
long acquiredAt=lock.getAcquiredat();
long heartbeatAt=lock.getLastheartbeat();
txnBatch.heartbeat();
response=msClient.showLocks();
Assert.assertEquals("Wrong number of locks2: " + response,1,response.getLocks().size());
lock=response.getLocks().get(0);
// heartbeat() must not re-acquire the lock.
Assert.assertEquals("Acquired timestamp didn't match",acquiredAt,lock.getAcquiredat());
Assert.assertTrue("Expected new heartbeat (" + lock.getLastheartbeat() + ") == old heartbeat("+ heartbeatAt+ ")",lock.getLastheartbeat() == heartbeatAt);
// Release the open transaction and connection (previously leaked).
txnBatch.close();
connection.close();
}
InternalCallVerifier EqualityVerifier
/**
 * Aborting a transaction with no writes must leave the batch in ABORTED
 * state, for both a partitioned (dbName/tblName) and an unpartitioned
 * (dbName2/tblName2) endpoint.
 */
@Test public void testTransactionBatchEmptyAbort() throws Exception {
HiveEndPoint endPt=new HiveEndPoint(metaStoreURI,dbName,tblName,partitionVals);
DelimitedInputWriter writer=new DelimitedInputWriter(fieldNames,",",endPt);
StreamingConnection connection=endPt.newConnection(true);
TransactionBatch txnBatch=connection.fetchTransactionBatch(10,writer);
txnBatch.beginNextTransaction();
txnBatch.abort();
Assert.assertEquals(TransactionBatch.TxnState.ABORTED,txnBatch.getCurrentTransactionState());
txnBatch.close();
connection.close();
// Repeat on the unpartitioned table.
endPt=new HiveEndPoint(metaStoreURI,dbName2,tblName2,null);
writer=new DelimitedInputWriter(fieldNames,",",endPt);
connection=endPt.newConnection(true);
txnBatch=connection.fetchTransactionBatch(10,writer);
txnBatch.beginNextTransaction();
txnBatch.abort();
Assert.assertEquals(TransactionBatch.TxnState.ABORTED,txnBatch.getCurrentTransactionState());
txnBatch.close();
connection.close();
}
InternalCallVerifier EqualityVerifier
/**
 * Writes a single JSON record via StrictJsonWriter, commits, and verifies
 * the stored row plus the OPEN -> COMMITTED -> INACTIVE state transitions.
 */
@Test public void testTransactionBatchCommit_Json() throws Exception {
HiveEndPoint endPt=new HiveEndPoint(metaStoreURI,dbName,tblName,partitionVals);
StrictJsonWriter writer=new StrictJsonWriter(endPt);
StreamingConnection connection=endPt.newConnection(true);
TransactionBatch txnBatch=connection.fetchTransactionBatch(10,writer);
txnBatch.beginNextTransaction();
Assert.assertEquals(TransactionBatch.TxnState.OPEN,txnBatch.getCurrentTransactionState());
String rec1="{\"id\" : 1, \"msg\": \"Hello streaming\"}";
txnBatch.write(rec1.getBytes());
txnBatch.commit();
checkDataWritten(partLoc,1,10,1,1,"{1, Hello streaming}");
Assert.assertEquals(TransactionBatch.TxnState.COMMITTED,txnBatch.getCurrentTransactionState());
txnBatch.close();
Assert.assertEquals(TransactionBatch.TxnState.INACTIVE,txnBatch.getCurrentTransactionState());
connection.close();
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Corrupts each of the four bucket files in a different way, then runs
 * FileDump three times: (1) plain dump must report 4 corrupted files,
 * (2) --recover --skip-dump must recover every bucket, (3) a final dump
 * must be clean. stderr is captured to assert on FileDump's output.
 */
@Test public void testFileDumpCorruptDataFiles() throws Exception {
dropDB(msClient,dbName3);
String dbLocation=dbFolder.newFolder(dbName3).getCanonicalPath() + ".db";
dbLocation=dbLocation.replaceAll("\\\\","/");
String[] colNames="key1,key2,data".split(",");
String[] colTypes="string,int,string".split(",");
String[] bucketNames="key1,key2".split(",");
int bucketCount=4;
createDbAndTable(driver,dbName3,tblName3,null,colNames,colTypes,bucketNames,null,dbLocation,bucketCount);
HiveEndPoint endPt=new HiveEndPoint(metaStoreURI,dbName3,tblName3,null);
DelimitedInputWriter writer=new DelimitedInputWriter(colNames,",",endPt);
StreamingConnection connection=endPt.newConnection(false);
TransactionBatch txnBatch=connection.fetchTransactionBatch(2,writer);
txnBatch.beginNextTransaction();
txnBatch.write("name0,1,Hello streaming".getBytes());
txnBatch.write("name2,2,Welcome to streaming".getBytes());
txnBatch.write("name4,2,more Streaming unlimited".getBytes());
txnBatch.write("name5,2,even more Streaming unlimited".getBytes());
txnBatch.commit();
Path path=new Path(dbLocation);
Collection files=FileDump.getAllFilesInPath(path,conf);
int readableFooter=-1;
// Each bucket gets a different corruption offset to cover distinct
// recovery paths (see corruptDataFile's third argument).
for ( String file : files) {
if (file.contains("bucket_00000")) {
corruptDataFile(file,conf,Integer.MIN_VALUE);
}
else if (file.contains("bucket_00001")) {
corruptDataFile(file,conf,-1);
}
else if (file.contains("bucket_00002")) {
Path bPath=new Path(file);
FileSystem fs=bPath.getFileSystem(conf);
FileStatus fileStatus=fs.getFileStatus(bPath);
// Remember the pre-corruption length: it is the readable footer
// offset asserted on after recovery.
readableFooter=(int)fileStatus.getLen();
corruptDataFile(file,conf,2);
}
else if (file.contains("bucket_00003")) {
corruptDataFile(file,conf,100);
}
}
// Pass 1: dump should flag all four buckets as corrupted.
PrintStream origErr=System.err;
ByteArrayOutputStream myErr=new ByteArrayOutputStream();
System.setErr(new PrintStream(myErr));
FileDump.main(new String[]{dbLocation});
System.err.flush();
System.setErr(origErr);
String errDump=new String(myErr.toByteArray());
Assert.assertEquals(false,errDump.contains("Exception"));
Assert.assertEquals(true,errDump.contains("4 file(s) are corrupted"));
Assert.assertEquals(false,errDump.contains("is still open for writes."));
// Pass 2: recovery run.
origErr=System.err;
myErr=new ByteArrayOutputStream();
System.setErr(new PrintStream(myErr));
FileDump.main(new String[]{dbLocation,"--recover","--skip-dump"});
System.err.flush();
System.setErr(origErr);
errDump=new String(myErr.toByteArray());
Assert.assertEquals(true,errDump.contains("bucket_00000 recovered successfully!"));
Assert.assertEquals(true,errDump.contains("No readable footers found. Creating empty orc file."));
Assert.assertEquals(true,errDump.contains("bucket_00001 recovered successfully!"));
Assert.assertEquals(true,errDump.contains("bucket_00002 recovered successfully!"));
Assert.assertEquals(true,errDump.contains("Readable footerOffsets: [" + readableFooter + "]"));
Assert.assertEquals(true,errDump.contains("bucket_00003 recovered successfully!"));
Assert.assertEquals(false,errDump.contains("Exception"));
Assert.assertEquals(false,errDump.contains("is still open for writes."));
// Pass 3: after recovery a plain dump must be clean.
origErr=System.err;
myErr=new ByteArrayOutputStream();
System.setErr(new PrintStream(myErr));
FileDump.main(new String[]{dbLocation});
System.err.flush();
System.setErr(origErr);
errDump=new String(myErr.toByteArray());
Assert.assertEquals(false,errDump.contains("Exception"));
Assert.assertEquals(false,errDump.contains("file(s) are corrupted"));
Assert.assertEquals(false,errDump.contains("is still open for writes."));
// Recovery must also have removed the _flush_length side files.
files=FileDump.getAllFilesInPath(path,conf);
for ( String file : files) {
Assert.assertEquals(false,file.contains("_flush_length"));
}
txnBatch.close();
}
InternalCallVerifier EqualityVerifier
/**
 * Commits delimited records on a partitioned endpoint (verifying data and
 * OPEN/COMMITTED/INACTIVE transitions), then repeats a single commit on an
 * unpartitioned endpoint.
 * Fix: the second txnBatch was never closed before connection.close(),
 * unlike the first half of this test and the sibling tests — close it.
 */
@Test public void testTransactionBatchCommit_Delimited() throws Exception {
HiveEndPoint endPt=new HiveEndPoint(metaStoreURI,dbName,tblName,partitionVals);
DelimitedInputWriter writer=new DelimitedInputWriter(fieldNames,",",endPt);
StreamingConnection connection=endPt.newConnection(true);
TransactionBatch txnBatch=connection.fetchTransactionBatch(10,writer);
txnBatch.beginNextTransaction();
Assert.assertEquals(TransactionBatch.TxnState.OPEN,txnBatch.getCurrentTransactionState());
txnBatch.write("1,Hello streaming".getBytes());
txnBatch.commit();
checkDataWritten(partLoc,1,10,1,1,"{1, Hello streaming}");
Assert.assertEquals(TransactionBatch.TxnState.COMMITTED,txnBatch.getCurrentTransactionState());
txnBatch.beginNextTransaction();
Assert.assertEquals(TransactionBatch.TxnState.OPEN,txnBatch.getCurrentTransactionState());
txnBatch.write("2,Welcome to streaming".getBytes());
// Uncommitted write must not be visible yet.
checkDataWritten(partLoc,1,10,1,1,"{1, Hello streaming}");
txnBatch.commit();
checkDataWritten(partLoc,1,10,1,1,"{1, Hello streaming}","{2, Welcome to streaming}");
txnBatch.close();
Assert.assertEquals(TransactionBatch.TxnState.INACTIVE,txnBatch.getCurrentTransactionState());
connection.close();
// Repeat on the unpartitioned table.
endPt=new HiveEndPoint(metaStoreURI,dbName2,tblName2,null);
writer=new DelimitedInputWriter(fieldNames,",",endPt);
connection=endPt.newConnection(true);
txnBatch=connection.fetchTransactionBatch(10,writer);
txnBatch.beginNextTransaction();
Assert.assertEquals(TransactionBatch.TxnState.OPEN,txnBatch.getCurrentTransactionState());
txnBatch.write("1,Hello streaming".getBytes());
txnBatch.commit();
Assert.assertEquals(TransactionBatch.TxnState.COMMITTED,txnBatch.getCurrentTransactionState());
txnBatch.close();
connection.close();
}
InternalCallVerifier EqualityVerifier
/**
 * Writes two records and aborts: nothing may be visible in the partition,
 * neither right after abort nor after the connection is closed.
 */
@Test public void testTransactionBatchAbort() throws Exception {
HiveEndPoint endPt=new HiveEndPoint(metaStoreURI,dbName,tblName,partitionVals);
DelimitedInputWriter writer=new DelimitedInputWriter(fieldNames,",",endPt);
StreamingConnection connection=endPt.newConnection(false);
TransactionBatch txnBatch=connection.fetchTransactionBatch(10,writer);
txnBatch.beginNextTransaction();
txnBatch.write("1,Hello streaming".getBytes());
txnBatch.write("2,Welcome to streaming".getBytes());
txnBatch.abort();
checkNothingWritten(partLoc);
Assert.assertEquals(TransactionBatch.TxnState.ABORTED,txnBatch.getCurrentTransactionState());
txnBatch.close();
connection.close();
// Closing must not flush the aborted records either.
checkNothingWritten(partLoc);
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Exercises error paths of the streaming API with a FaultyWriter that can
 * simulate I/O failures: operations on a closed batch must throw
 * IllegalStateException; a write/commit failure must abort the affected
 * transactions (verified via the metastore's open-txns list and HWM).
 * NOTE(review): the final batch is aborted but never closed and the
 * connection is never closed — presumably intentional for this error test,
 * but worth confirming.
 */
@Test public void testErrorHandling() throws Exception {
runCmdOnDriver("create database testErrors");
runCmdOnDriver("use testErrors");
runCmdOnDriver("create table T(a int, b int) clustered by (b) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')");
HiveEndPoint endPt=new HiveEndPoint(metaStoreURI,"testErrors","T",null);
DelimitedInputWriter innerWriter=new DelimitedInputWriter("a,b".split(","),",",endPt);
FaultyWriter writer=new FaultyWriter(innerWriter);
StreamingConnection connection=endPt.newConnection(false);
TransactionBatch txnBatch=connection.fetchTransactionBatch(2,writer);
txnBatch.close();
// heartbeat/abort on a closed batch must be harmless no-ops.
txnBatch.heartbeat();
txnBatch.abort();
GetOpenTxnsInfoResponse r=msClient.showTxns();
Assert.assertEquals("HWM didn't match",2,r.getTxn_high_water_mark());
List ti=r.getOpen_txns();
// Closing an unused batch aborts its two pre-allocated transactions.
Assert.assertEquals("wrong status ti(0)",TxnState.ABORTED,ti.get(0).getState());
Assert.assertEquals("wrong status ti(1)",TxnState.ABORTED,ti.get(1).getState());
Exception expectedEx=null;
try {
txnBatch.beginNextTransaction();
}
catch ( IllegalStateException ex) {
expectedEx=ex;
}
Assert.assertTrue("beginNextTransaction() should have failed",expectedEx != null && expectedEx.getMessage().contains("has been closed()"));
expectedEx=null;
try {
txnBatch.write("name0,1,Hello streaming".getBytes());
}
catch ( IllegalStateException ex) {
expectedEx=ex;
}
Assert.assertTrue("write() should have failed",expectedEx != null && expectedEx.getMessage().contains("has been closed()"));
expectedEx=null;
try {
txnBatch.commit();
}
catch ( IllegalStateException ex) {
expectedEx=ex;
}
Assert.assertTrue("commit() should have failed",expectedEx != null && expectedEx.getMessage().contains("has been closed()"));
// Fresh batch: a successful commit, then a simulated write failure.
txnBatch=connection.fetchTransactionBatch(2,writer);
txnBatch.beginNextTransaction();
txnBatch.write("name2,2,Welcome to streaming".getBytes());
txnBatch.write("name4,2,more Streaming unlimited".getBytes());
txnBatch.write("name5,2,even more Streaming unlimited".getBytes());
txnBatch.commit();
expectedEx=null;
txnBatch.beginNextTransaction();
writer.enableErrors();
try {
txnBatch.write("name6,2,Doh!".getBytes());
}
catch ( StreamingIOFailure ex) {
expectedEx=ex;
}
Assert.assertTrue("Wrong exception: " + (expectedEx != null ? expectedEx.getMessage() : "?"),expectedEx != null && expectedEx.getMessage().contains("Simulated fault occurred"));
expectedEx=null;
// The failed write closes the batch, so commit now fails as closed.
try {
txnBatch.commit();
}
catch ( IllegalStateException ex) {
expectedEx=ex;
}
Assert.assertTrue("commit() should have failed",expectedEx != null && expectedEx.getMessage().contains("has been closed()"));
r=msClient.showTxns();
Assert.assertEquals("HWM didn't match",4,r.getTxn_high_water_mark());
ti=r.getOpen_txns();
Assert.assertEquals("wrong status ti(0)",TxnState.ABORTED,ti.get(0).getState());
Assert.assertEquals("wrong status ti(1)",TxnState.ABORTED,ti.get(1).getState());
Assert.assertEquals("wrong status ti(2)",TxnState.ABORTED,ti.get(2).getState());
// Fresh batch: simulate a failure during commit itself.
writer.disableErrors();
txnBatch=connection.fetchTransactionBatch(2,writer);
txnBatch.beginNextTransaction();
txnBatch.write("name2,2,Welcome to streaming".getBytes());
writer.enableErrors();
expectedEx=null;
try {
txnBatch.commit();
}
catch ( StreamingIOFailure ex) {
expectedEx=ex;
}
Assert.assertTrue("Wrong exception: " + (expectedEx != null ? expectedEx.getMessage() : "?"),expectedEx != null && expectedEx.getMessage().contains("Simulated fault occurred"));
r=msClient.showTxns();
Assert.assertEquals("HWM didn't match",6,r.getTxn_high_water_mark());
ti=r.getOpen_txns();
Assert.assertEquals("wrong status ti(3)",TxnState.ABORTED,ti.get(3).getState());
Assert.assertEquals("wrong status ti(4)",TxnState.ABORTED,ti.get(4).getState());
txnBatch.abort();
}
APIUtilityVerifier EqualityVerifier
@Test public void testFileDump() throws Exception {
dropDB(msClient,dbName3);
dropDB(msClient,dbName4);
String dbLocation=dbFolder.newFolder(dbName3).getCanonicalPath() + ".db";
dbLocation=dbLocation.replaceAll("\\\\","/");
String[] colNames="key1,key2,data".split(",");
String[] colTypes="string,int,string".split(",");
String[] bucketNames="key1,key2".split(",");
int bucketCount=4;
createDbAndTable(driver,dbName3,tblName3,null,colNames,colTypes,bucketNames,null,dbLocation,bucketCount);
String dbLocation2=dbFolder.newFolder(dbName4).getCanonicalPath() + ".db";
dbLocation2=dbLocation2.replaceAll("\\\\","/");
String[] colNames2="key3,key4,data2".split(",");
String[] colTypes2="string,int,string".split(",");
String[] bucketNames2="key3,key4".split(",");
createDbAndTable(driver,dbName4,tblName4,null,colNames2,colTypes2,bucketNames2,null,dbLocation2,bucketCount);
HiveEndPoint endPt=new HiveEndPoint(metaStoreURI,dbName3,tblName3,null);
DelimitedInputWriter writer=new DelimitedInputWriter(colNames,",",endPt);
StreamingConnection connection=endPt.newConnection(false);
TransactionBatch txnBatch=connection.fetchTransactionBatch(2,writer);
txnBatch.beginNextTransaction();
txnBatch.write("name0,1,Hello streaming".getBytes());
txnBatch.write("name2,2,Welcome to streaming".getBytes());
txnBatch.write("name4,2,more Streaming unlimited".getBytes());
txnBatch.write("name5,2,even more Streaming unlimited".getBytes());
txnBatch.commit();
PrintStream origErr=System.err;
ByteArrayOutputStream myErr=new ByteArrayOutputStream();
System.setErr(new PrintStream(myErr));
FileDump.main(new String[]{dbLocation});
System.err.flush();
System.setErr(origErr);
String errDump=new String(myErr.toByteArray());
Assert.assertEquals(false,errDump.contains("file(s) are corrupted"));
Assert.assertEquals(false,errDump.contains("is still open for writes."));
HiveEndPoint endPt2=new HiveEndPoint(metaStoreURI,dbName4,tblName4,null);
DelimitedInputWriter writer2=new DelimitedInputWriter(colNames2,",",endPt2);
StreamingConnection connection2=endPt2.newConnection(false);
TransactionBatch txnBatch2=connection2.fetchTransactionBatch(2,writer2);
txnBatch2.beginNextTransaction();
txnBatch2.write("name5,2,fact3".getBytes());
txnBatch2.write("name8,2,fact3".getBytes());
txnBatch2.write("name0,1,fact1".getBytes());
txnBatch2.commit();
origErr=System.err;
myErr=new ByteArrayOutputStream();
System.setErr(new PrintStream(myErr));
FileDump.main(new String[]{dbLocation});
System.out.flush();
System.err.flush();
System.setErr(origErr);
errDump=new String(myErr.toByteArray());
Assert.assertEquals(false,errDump.contains("Exception"));
Assert.assertEquals(false,errDump.contains("file(s) are corrupted"));
Assert.assertEquals(false,errDump.contains("is still open for writes."));
}
InternalCallVerifier EqualityVerifier
/**
 * Committing a transaction with no writes must succeed and end in
 * COMMITTED state, for both a partitioned and an unpartitioned endpoint.
 */
@Test public void testTransactionBatchEmptyCommit() throws Exception {
HiveEndPoint endPt=new HiveEndPoint(metaStoreURI,dbName,tblName,partitionVals);
DelimitedInputWriter writer=new DelimitedInputWriter(fieldNames,",",endPt);
StreamingConnection connection=endPt.newConnection(false,null);
TransactionBatch txnBatch=connection.fetchTransactionBatch(10,writer);
txnBatch.beginNextTransaction();
txnBatch.commit();
Assert.assertEquals(TransactionBatch.TxnState.COMMITTED,txnBatch.getCurrentTransactionState());
txnBatch.close();
connection.close();
// Repeat on the unpartitioned table.
endPt=new HiveEndPoint(metaStoreURI,dbName2,tblName2,null);
writer=new DelimitedInputWriter(fieldNames2,",",endPt);
connection=endPt.newConnection(false,null);
txnBatch=connection.fetchTransactionBatch(10,writer);
txnBatch.beginNextTransaction();
txnBatch.commit();
Assert.assertEquals(TransactionBatch.TxnState.COMMITTED,txnBatch.getCurrentTransactionState());
txnBatch.close();
connection.close();
}
Class: org.apache.hive.hcatalog.streaming.mutate.client.lock.TestLock InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/** A read lock acquires the mocked lock id and has no transaction id. */
@Test public void testAcquireReadLockWithNoIssues() throws Exception {
readLock.acquire();
assertEquals(Long.valueOf(LOCK_ID),readLock.getLockId());
assertNull(readLock.getTransactionId());
}
InternalCallVerifier EqualityVerifier
/** A write lock acquired within a txn records both lock id and txn id. */
@Test public void testAcquireTxnLockWithNoIssues() throws Exception {
writeLock.acquire(TRANSACTION_ID);
assertEquals(Long.valueOf(LOCK_ID),writeLock.getLockId());
assertEquals(Long.valueOf(TRANSACTION_ID),writeLock.getTransactionId());
}
InternalCallVerifier EqualityVerifier
/** acquire() must retry through WAITING responses until ACQUIRED. */
@Test public void testAcquireLockWithWaitRetries() throws Exception {
when(mockLockResponse.getState()).thenReturn(WAITING,WAITING,ACQUIRED);
readLock.acquire();
assertEquals(Long.valueOf(LOCK_ID),readLock.getLockId());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Acquiring a read lock must send a metastore lock request with no txn id,
 * the expected user/host, and exactly one SHARED_READ table component per
 * source table.
 */
@Test public void testAcquireReadLockCheckLocks() throws Exception {
readLock.acquire();
verify(mockMetaStoreClient).lock(requestCaptor.capture());
LockRequest sentRequest=requestCaptor.getValue();
// A plain read lock is not associated with any transaction.
assertEquals(0,sentRequest.getTxnid());
assertEquals(USER,sentRequest.getUser());
assertEquals(InetAddress.getLocalHost().getHostName(),sentRequest.getHostname());
List lockComponents=sentRequest.getComponent();
assertEquals(2,lockComponents.size());
// One SHARED_READ component is expected for each source table.
for (String sourceTable : new String[]{"SOURCE_1","SOURCE_2"}) {
LockComponent expectedComponent=new LockComponent(LockType.SHARED_READ,LockLevel.TABLE,"DB");
expectedComponent.setTablename(sourceTable);
assertTrue(lockComponents.contains(expectedComponent));
}
}
InternalCallVerifier EqualityVerifier
/** The lock request sent to the metastore carries the configured user. */
@Test public void testAcquireLockCheckUser() throws Exception {
readLock.acquire();
verify(mockMetaStoreClient).lock(requestCaptor.capture());
LockRequest actualRequest=requestCaptor.getValue();
assertEquals(USER,actualRequest.getUser());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Acquiring a write lock inside a txn must send a request carrying the txn
 * id, SHARED_READ components for both sources, and a SHARED_WRITE
 * component for the sink.
 */
@Test public void testAcquireTxnLockCheckLocks() throws Exception {
writeLock.acquire(TRANSACTION_ID);
verify(mockMetaStoreClient).lock(requestCaptor.capture());
LockRequest sentRequest=requestCaptor.getValue();
assertEquals(TRANSACTION_ID,sentRequest.getTxnid());
assertEquals(USER,sentRequest.getUser());
assertEquals(InetAddress.getLocalHost().getHostName(),sentRequest.getHostname());
List lockComponents=sentRequest.getComponent();
assertEquals(3,lockComponents.size());
// Both source tables are locked for shared reads...
for (String sourceTable : new String[]{"SOURCE_1","SOURCE_2"}) {
LockComponent readComponent=new LockComponent(LockType.SHARED_READ,LockLevel.TABLE,"DB");
readComponent.setTablename(sourceTable);
assertTrue(lockComponents.contains(readComponent));
}
// ...and the sink is locked for shared writes.
LockComponent writeComponent=new LockComponent(LockType.SHARED_WRITE,LockLevel.TABLE,"DB");
writeComponent.setTablename("SINK");
assertTrue(lockComponents.contains(writeComponent));
}
Class: org.apache.hive.hcatalog.templeton.TestWebHCatE2e EqualityVerifier PublicFieldVerifier
/** /version/pig is not implemented: expect 501 plus an error message. */
@Test public void getPigVersion() throws Exception {
MethodCallRetVal p=doHttpCall(templetonBaseUrl + "/version/pig",HTTP_METHOD_TYPE.GET);
Assert.assertEquals(HttpStatus.NOT_IMPLEMENTED_501,p.httpStatusCode);
Map props=JsonBuilder.jsonToMap(p.responseBody);
Assert.assertEquals(p.getAssertMsg(),"Pig version request not yet " + "implemented",(String)props.get("error"));
}
APIUtilityVerifier EqualityVerifier IgnoredMethod HybridVerifier
/** Describing a missing table must yield 404 with INVALID_TABLE error code. */
@Ignore("not ready due to HIVE-4824") @Test public void describeNoSuchTable() throws IOException {
MethodCallRetVal p=doHttpCall(templetonBaseUrl + "/ddl/database/default/table/no_such_table",HTTP_METHOD_TYPE.GET);
Assert.assertEquals(p.getAssertMsg(),HttpStatus.NOT_FOUND_404,p.httpStatusCode);
Assert.assertEquals(p.getAssertMsg(),ErrorMsg.INVALID_TABLE.getErrorCode(),getErrorCode(p.responseBody));
}
BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
/** /version/hive returns 200 with module "hive" and an x.y.z version. */
@Test public void getHiveVersion() throws Exception {
MethodCallRetVal p=doHttpCall(templetonBaseUrl + "/version/hive",HTTP_METHOD_TYPE.GET);
Assert.assertEquals(HttpStatus.OK_200,p.httpStatusCode);
Map props=JsonBuilder.jsonToMap(p.responseBody);
Assert.assertEquals("hive",props.get("module"));
// Version string must look like a dotted release number.
Assert.assertTrue(p.getAssertMsg(),((String)props.get("version")).matches("[0-9]+.[0-9]+.[0-9]+.*"));
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * /status returns 200 with {"status":"ok","version":"v1"}; maps are
 * compared sorted so JSON key order does not matter.
 */
@Test public void getStatus() throws IOException {
LOG.debug("+getStatus()");
MethodCallRetVal p=doHttpCall(templetonBaseUrl + "/status",HTTP_METHOD_TYPE.GET);
Assert.assertEquals(p.getAssertMsg(),HttpStatus.OK_200,p.httpStatusCode);
Assert.assertTrue(p.getAssertMsg(),jsonStringToSortedMap("{\"status\":\"ok\",\"version\":\"v1\"}").equals(jsonStringToSortedMap(p.responseBody)));
LOG.debug("-getStatus()");
}
APIUtilityVerifier EqualityVerifier IgnoredMethod HybridVerifier
/**
 * Creates an rcfile table through the DDL REST endpoint (PUT), then
 * fetches its description (GET); both calls must return 200.
 */
@Ignore("not ready due to HIVE-4824") @Test public void createTable() throws IOException {
Map props=new HashMap();
props.put("comment","Table in default db");
Map col=new HashMap();
col.put("name","col1");
col.put("type","string");
List> colList=new ArrayList>(1);
colList.add(col);
props.put("columns",colList);
Map format=new HashMap();
format.put("storedAs","rcfile");
props.put("format",format);
MethodCallRetVal createTbl=doHttpCall(templetonBaseUrl + "/ddl/database/default/table/test_table",HTTP_METHOD_TYPE.PUT,props,null);
Assert.assertEquals(createTbl.getAssertMsg(),HttpStatus.OK_200,createTbl.httpStatusCode);
LOG.info("createTable() resp: " + createTbl.responseBody);
MethodCallRetVal descTbl=doHttpCall(templetonBaseUrl + "/ddl/database/default/table/test_table",HTTP_METHOD_TYPE.GET);
Assert.assertEquals(descTbl.getAssertMsg(),HttpStatus.OK_200,descTbl.httpStatusCode);
}
APIUtilityVerifier EqualityVerifier IgnoredMethod HybridVerifier
/** Listing databases must return only "default" on a fresh instance. */
@Ignore("not ready due to HIVE-4824") @Test public void listDataBases() throws IOException {
LOG.debug("+listDataBases()");
MethodCallRetVal p=doHttpCall(templetonBaseUrl + "/ddl/database",HTTP_METHOD_TYPE.GET);
Assert.assertEquals(p.getAssertMsg(),HttpStatus.OK_200,p.httpStatusCode);
Assert.assertEquals(p.getAssertMsg(),"{\"databases\":[\"default\"]}",p.responseBody);
LOG.debug("-listDataBases()");
}
APIUtilityVerifier EqualityVerifier IgnoredMethod HybridVerifier
/** Creating a database with comment/location/properties must return 200. */
@Ignore("not ready due to HIVE-4824") @Test public void createDataBase() throws IOException {
Map props=new HashMap();
props.put("comment","Hello, there");
props.put("location",System.getProperty("test.warehouse.dir"));
Map props2=new HashMap();
props2.put("prop","val");
props.put("properties",props2);
MethodCallRetVal p=doHttpCall(templetonBaseUrl + "/ddl/database/newdb",HTTP_METHOD_TYPE.PUT,props,null);
Assert.assertEquals(p.getAssertMsg(),HttpStatus.OK_200,p.httpStatusCode);
}
APIUtilityVerifier EqualityVerifier IgnoredMethod HybridVerifier
/**
 * Dropping a table in a nonexistent DB must yield 404 with the
 * DATABASE_NOT_EXISTS error code in the response body.
 */
@Ignore("not ready due to HIVE-4824") @Test public void dropTableNoSuchDB() throws IOException {
MethodCallRetVal p=doHttpCall(templetonBaseUrl + "/ddl/database/no_such_db/table/t1",HTTP_METHOD_TYPE.DELETE);
Assert.assertEquals(p.getAssertMsg(),HttpStatus.NOT_FOUND_404,p.httpStatusCode);
Assert.assertEquals(p.getAssertMsg(),ErrorMsg.DATABASE_NOT_EXISTS.getErrorCode(),getErrorCode(p.responseBody));
}
EqualityVerifier IgnoredMethod HybridVerifier
/**
 * Dropping a nonexistent table with ifExists=true must succeed (200)
 * rather than report an error.
 */
@Ignore("not ready due to HIVE-4824") @Test public void dropTableIfExists() throws IOException {
MethodCallRetVal p=doHttpCall(templetonBaseUrl + "/ddl/database/default/table/no_such_table",HTTP_METHOD_TYPE.DELETE,null,new NameValuePair[]{new NameValuePair("ifExists","true")});
Assert.assertEquals(p.getAssertMsg(),HttpStatus.OK_200,p.httpStatusCode);
}
EqualityVerifier
/**
 * Check that we return correct status code when the URL doesn't map to any method
 * in {@link Server}: an unmapped path must produce 404.
 */
@Test public void invalidPath() throws IOException {
MethodCallRetVal p=doHttpCall(templetonBaseUrl + "/no_such_mapping/database",HTTP_METHOD_TYPE.GET);
Assert.assertEquals(p.getAssertMsg(),HttpStatus.NOT_FOUND_404,p.httpStatusCode);
}
BooleanVerifier EqualityVerifier PublicFieldVerifier HybridVerifier
/** /version/hadoop returns 200 with module "hadoop" and a 1.x/2.x version. */
@Test public void getHadoopVersion() throws Exception {
MethodCallRetVal p=doHttpCall(templetonBaseUrl + "/version/hadoop",HTTP_METHOD_TYPE.GET);
Assert.assertEquals(HttpStatus.OK_200,p.httpStatusCode);
Map props=JsonBuilder.jsonToMap(p.responseBody);
Assert.assertEquals("hadoop",props.get("module"));
Assert.assertTrue(p.getAssertMsg(),((String)props.get("version")).matches("[1-2].[0-9]+.[0-9]+.*"));
}
APIUtilityVerifier EqualityVerifier IgnoredMethod HybridVerifier
/**
 * Dropping a table in a nonexistent DB — even with ifExists=true — must
 * still yield 404 with DATABASE_NOT_EXISTS (only the table may be absent).
 */
@Ignore("not ready due to HIVE-4824") @Test public void dropTableNoSuchDbIfExists() throws IOException {
MethodCallRetVal p=doHttpCall(templetonBaseUrl + "/ddl/database/no_such_db/table/t1",HTTP_METHOD_TYPE.DELETE,null,new NameValuePair[]{new NameValuePair("ifExists","true")});
Assert.assertEquals(p.getAssertMsg(),HttpStatus.NOT_FOUND_404,p.httpStatusCode);
Assert.assertEquals(p.getAssertMsg(),ErrorMsg.DATABASE_NOT_EXISTS.getErrorCode(),getErrorCode(p.responseBody));
}
Class: org.apache.hive.hcatalog.templeton.tool.TestJobIDParser InternalCallVerifier EqualityVerifier
/**
 * The Hive job-ID parser must find exactly one job ID in the sample status file.
 */
@Test public void testParseHive() throws IOException {
  String errFileName = "src/test/data/status/hive";
  HiveJobIDParser hiveJobIDParser = new HiveJobIDParser(errFileName, new Configuration());
  List jobs = hiveJobIDParser.parseJobID();
  // assertEquals takes (expected, actual); the original had them reversed,
  // producing misleading failure messages.
  Assert.assertEquals(1, jobs.size());
}
InternalCallVerifier EqualityVerifier
/**
 * The Pig job-ID parser must find exactly one job ID in the sample status file.
 */
@Test public void testParsePig() throws IOException {
  String errFileName = "src/test/data/status/pig";
  PigJobIDParser pigJobIDParser = new PigJobIDParser(errFileName, new Configuration());
  List jobs = pigJobIDParser.parseJobID();
  // assertEquals takes (expected, actual); the original had them reversed.
  Assert.assertEquals(1, jobs.size());
}
InternalCallVerifier EqualityVerifier
/**
 * The jar job-ID parser must find exactly one job ID in the streaming status file.
 */
@Test public void testParseStreaming() throws IOException {
  String errFileName = "src/test/data/status/streaming";
  JarJobIDParser jarJobIDParser = new JarJobIDParser(errFileName, new Configuration());
  List jobs = jarJobIDParser.parseJobID();
  // assertEquals takes (expected, actual); the original had them reversed.
  Assert.assertEquals(1, jobs.size());
}
InternalCallVerifier EqualityVerifier
/**
 * The jar job-ID parser must find exactly one job ID in the jar status file.
 */
@Test public void testParseJar() throws IOException {
  String errFileName = "src/test/data/status/jar";
  JarJobIDParser jarJobIDParser = new JarJobIDParser(errFileName, new Configuration());
  List jobs = jarJobIDParser.parseJobID();
  // assertEquals takes (expected, actual); the original had them reversed.
  Assert.assertEquals(1, jobs.size());
}
Class: org.apache.hive.hcatalog.templeton.tool.TestTempletonUtils BooleanVerifier EqualityVerifier HybridVerifier
/**
 * encodeArray: null passes through as null, an empty array encodes to the
 * empty string, and commas inside elements are escaped with ESCAPE_CHAR.
 */
@Test public void testEncodeArray(){
  Assert.assertEquals(null, TempletonUtils.encodeArray((String[])null));
  // assertEquals(0, length) gives a better failure message than the original
  // assertTrue(length == 0).
  Assert.assertEquals(0, TempletonUtils.encodeArray(new String[0]).length());
  // (Removed the original's dead store: tmp was initialized to new String[0]
  // and immediately reassigned.)
  String[] tmp = new String[3];
  tmp[0] = "fred";
  tmp[1] = null;
  tmp[2] = "peter,lisa,, barney";
  Assert.assertEquals("fred,,peter" + StringUtils.ESCAPE_CHAR + ",lisa"+ StringUtils.ESCAPE_CHAR+ ","+ StringUtils.ESCAPE_CHAR+ ", barney",TempletonUtils.encodeArray(tmp));
}
APIUtilityVerifier IterativeVerifier UtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * decodeArray must invert encodeArray, preserving null elements and
 * escaped commas inside elements.
 */
@Test public void testDecodeArray(){
  Assert.assertTrue(TempletonUtils.encodeArray((String[])null) == null);
  String[] tmp = new String[3];
  tmp[0] = "fred";
  tmp[1] = null;
  tmp[2] = "peter,lisa,, barney";
  String[] tmp2 = TempletonUtils.decodeArray(TempletonUtils.encodeArray(tmp));
  // The original wrapped an element-wise loop in try/catch(Exception) with
  // Assert.fail in the catch. That catch was dead for its stated purpose:
  // JUnit assertion failures are Errors, not Exceptions, so "Arrays were not
  // equal" could never be reported (and its message lacked a separator).
  // assertArrayEquals performs the same comparison with a proper diagnostic.
  Assert.assertArrayEquals(tmp, tmp2);
}
EqualityVerifier NullVerifier HybridVerifier
/**
 * extractPercentComplete returns null for lines carrying no progress
 * information and the "NN% complete" fragment for Pig launcher progress lines.
 */
@Test public void testExtractPercentComplete(){
  Assert.assertNull(TempletonUtils.extractPercentComplete("fred"));
  for (String controllerLine : CONTROLLER_LINES) {
    Assert.assertNull(TempletonUtils.extractPercentComplete(controllerLine));
  }
  String progressLine="2011-12-15 18:12:36,333 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher - 50% complete";
  Assert.assertEquals("50% complete",TempletonUtils.extractPercentComplete(progressLine));
}
APIUtilityVerifier IterativeVerifier UtilityVerifier EqualityVerifier HybridVerifier
/**
 * addUserHomeDirectoryIfApplicable: relative paths get rooted under the
 * user's home directory, absolute and authority-qualified URIs pass through
 * unchanged, and a Windows-style path is rejected with URISyntaxException.
 */
@Test public void testConstructingUserHomeDirectory() throws Exception {
  String[] sources=new String[]{"output+","/user/hadoop/output","hdfs://container","hdfs://container/","hdfs://container/path","output#link","hdfs://cointaner/output#link","hdfs://container@acc/test"};
  String[] expectedResults=new String[]{"/user/webhcat/output+","/user/hadoop/output","hdfs://container/user/webhcat","hdfs://container/","hdfs://container/path","/user/webhcat/output#link","hdfs://cointaner/output#link","hdfs://container@acc/test"};
  for (int i=0; i < sources.length; i++) {
    String source=sources[i];
    String expectedResult=expectedResults[i];
    String result=TempletonUtils.addUserHomeDirectoryIfApplicable(source,"webhcat");
    // assertEquals takes (expected, actual); the original reversed them,
    // which garbles failure messages.
    Assert.assertEquals(expectedResult, result);
  }
  String badUri="c:\\some\\path";
  try {
    TempletonUtils.addUserHomeDirectoryIfApplicable(badUri,"webhcat");
    Assert.fail("addUserHomeDirectoryIfApplicable should fail for bad URI: " + badUri);
  }
  catch ( URISyntaxException ex) {
    // expected for a non-URI Windows path
  }
}
UtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
@Test public void testHadoopFsListAsString(){
try {
String tmpFileName1="/tmp/testHadoopFsListAsString1";
String tmpFileName2="/tmp/testHadoopFsListAsString2";
File tmpFile1=new File(tmpFileName1);
File tmpFile2=new File(tmpFileName2);
tmpFile1.createNewFile();
tmpFile2.createNewFile();
Assert.assertTrue(TempletonUtils.hadoopFsListAsString(null,null,null) == null);
Assert.assertTrue(TempletonUtils.hadoopFsListAsString("/tmp,/usr",null,null) == null);
Assert.assertEquals("file:" + tmpFileName1 + ",file:"+ tmpFileName2,TempletonUtils.hadoopFsListAsString(tmpFileName1 + "," + tmpFileName2,new Configuration(),null));
}
catch ( FileNotFoundException e) {
Assert.fail("Couldn't find name for " + tmpFile.toURI().toString());
}
catch ( Exception e) {
e.printStackTrace();
}
try {
TempletonUtils.hadoopFsListAsString("/scoobydoo/teddybear,joe",new Configuration(),null);
Assert.fail("Should not have found /scoobydoo/teddybear");
}
catch ( FileNotFoundException e) {
}
catch ( Exception e) {
e.printStackTrace();
}
}
UtilityVerifier EqualityVerifier HybridVerifier
@Test public void testHadoopFsFilename(){
try {
String tmpFileName1="/tmp/testHadoopFsListAsArray1";
String tmpFileName2="/tmp/testHadoopFsListAsArray2";
File tmpFile1=new File(tmpFileName1);
File tmpFile2=new File(tmpFileName2);
tmpFile1.createNewFile();
tmpFile2.createNewFile();
Assert.assertEquals(null,TempletonUtils.hadoopFsFilename(null,null,null));
Assert.assertEquals(null,TempletonUtils.hadoopFsFilename(tmpFile.toURI().toString(),null,null));
Assert.assertEquals(tmpFile.toURI().toString(),TempletonUtils.hadoopFsFilename(tmpFile.toURI().toString(),new Configuration(),null));
}
catch ( FileNotFoundException e) {
Assert.fail("Couldn't find name for /tmp");
Assert.fail("Couldn't find name for " + tmpFile.toURI().toString());
}
catch ( Exception e) {
e.printStackTrace();
}
try {
TempletonUtils.hadoopFsFilename("/scoobydoo/teddybear",new Configuration(),null);
Assert.fail("Should not have found /scoobydoo/teddybear");
}
catch ( FileNotFoundException e) {
}
catch ( Exception e) {
e.printStackTrace();
}
}
UtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
@Test public void testHadoopFsListAsArray(){
try {
String tmpFileName1="/tmp/testHadoopFsListAsArray1";
String tmpFileName2="/tmp/testHadoopFsListAsArray2";
File tmpFile1=new File(tmpFileName1);
File tmpFile2=new File(tmpFileName2);
tmpFile1.createNewFile();
tmpFile2.createNewFile();
Assert.assertTrue(TempletonUtils.hadoopFsListAsArray(null,null,null) == null);
Assert.assertTrue(TempletonUtils.hadoopFsListAsArray(tmpFileName1 + "," + tmpFileName2,null,null) == null);
String[] tmp2=TempletonUtils.hadoopFsListAsArray(tmpFileName1 + "," + tmpFileName2,new Configuration(),null);
Assert.assertEquals("file:" + tmpFileName1,tmp2[0]);
Assert.assertEquals("file:" + tmpFileName2,tmp2[1]);
tmpFile1.delete();
tmpFile2.delete();
}
catch ( FileNotFoundException e) {
Assert.fail("Couldn't find name for " + tmpFile.toURI().toString());
}
catch ( Exception e) {
e.printStackTrace();
}
try {
TempletonUtils.hadoopFsListAsArray("/scoobydoo/teddybear,joe",new Configuration(),null);
Assert.fail("Should not have found /scoobydoo/teddybear");
}
catch ( FileNotFoundException e) {
}
catch ( Exception e) {
e.printStackTrace();
}
}
APIUtilityVerifier IterativeVerifier EqualityVerifier
/**
 * Joins escaped property strings with commas, splits them back with
 * StringUtils.split, and verifies the un-escaped values survive the round trip.
 */
@Test public void testPropertiesParsing() throws Exception {
  String[] props={"hive.metastore.uris=thrift://localhost:9933\\,thrift://127.0.0.1:9933","hive.metastore.sasl.enabled=false","hive.some.fake.path=C:\\foo\\bar.txt\\"};
  StringBuilder joined=new StringBuilder();
  for (String prop : props) {
    if (joined.length() > 0) {
      joined.append(',');
    }
    joined.append(prop);
  }
  String[] reparsed=StringUtils.split(joined.toString());
  for (int idx=0; idx < reparsed.length; idx++) {
    Assert.assertEquals("Pre/post split values don't match",TempletonUtils.unEscapeString(props[idx]),TempletonUtils.unEscapeString(reparsed[idx]));
  }
}
Class: org.apache.hive.hcatalog.templeton.tool.TestTrivialExecService APIUtilityVerifier IterativeVerifier UtilityVerifier EqualityVerifier HybridVerifier
/**
 * Runs "echo success" through TrivialExecService and verifies stdout holds
 * "success" and stderr is empty.
 */
@Test public void test(){
  ArrayList list=new ArrayList();
  list.add("echo");
  list.add("success");
  BufferedReader out=null;
  BufferedReader err=null;
  try {
    Process process=TrivialExecService.getInstance().run(list,new ArrayList(),new HashMap());
    out=new BufferedReader(new InputStreamReader(process.getInputStream()));
    err=new BufferedReader(new InputStreamReader(process.getErrorStream()));
    Assert.assertEquals("success",out.readLine());
    // Any stderr output is a failure; report the first offending line.
    String line;
    while ((line=err.readLine()) != null) {
      Assert.fail(line);
    }
    process.waitFor();
  }
  catch ( Exception e) {
    e.printStackTrace();
    Assert.fail("Process caused exception.");
  }
  finally {
    // Close each reader exactly once; the original closed "out" twice and
    // relied on empty catch blocks to mask the NPE thrown when a reader was
    // never assigned (e.g. if run() itself failed).
    if (out != null) {
      try {
        out.close();
      }
      catch ( IOException ignored) {
        // best-effort cleanup
      }
    }
    if (err != null) {
      try {
        err.close();
      }
      catch ( IOException ignored) {
        // best-effort cleanup
      }
    }
  }
}
Class: org.apache.hive.jdbc.HiveStatementTest InternalCallVerifier EqualityVerifier
/**
 * Setting fetch size to 0 must restore the statement's default fetch size.
 */
@Test public void testSetFetchSize2() throws SQLException {
  HiveStatement stmt=new HiveStatement(null,null,null);
  int defaultFetchSize=stmt.getFetchSize();
  stmt.setFetchSize(0);
  assertEquals(defaultFetchSize,stmt.getFetchSize());
}
InternalCallVerifier EqualityVerifier
/**
 * A positive fetch size set on the statement is returned by getFetchSize().
 */
@Test public void testSetFetchSize1() throws SQLException {
  HiveStatement stmt=new HiveStatement(null,null,null);
  int requested=123;
  stmt.setFetchSize(requested);
  assertEquals(requested,stmt.getFetchSize());
}
Class: org.apache.hive.jdbc.TestJdbcDriver2 APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * verify 'explain ...' resultset: exactly one column labeled
 * EXPL_COLUMN_NAME and at least one row of plan text.
 * @throws SQLException
 */
@Test public void testExplainStmt() throws SQLException {
  Statement stmt=con.createStatement();
  ResultSet res=stmt.executeQuery("explain select c1, c2, c3, c4, c5 as a, c6, c7, c8, c9, c10, c11, c12, " + "c1*2, sentences(null, null, null) as b, c23 from " + dataTypeTableName + " limit 1");
  ResultSetMetaData md=res.getMetaData();
  // assertEquals takes (expected, actual); the original had them reversed,
  // producing misleading failure messages.
  assertEquals(1, md.getColumnCount());
  assertEquals(EXPL_COLUMN_NAME, md.getColumnLabel(1));
  assertTrue("Nothing returned explain",res.next());
}
APIUtilityVerifier EqualityVerifier
/**
 * A fetchSize parameter on the JDBC URL must be applied to statements
 * created from that connection.
 */
@Test public void testURLWithFetchSize() throws SQLException {
  Connection con=getConnection("default;fetchSize=1234");
  Statement stmt=con.createStatement();
  // assertEquals takes (expected, actual); the original had them reversed.
  assertEquals(1234, stmt.getFetchSize());
  // Close what we opened; the original leaked both statement and connection.
  stmt.close();
  con.close();
}
APIUtilityVerifier IterativeVerifier BranchVerifier EqualityVerifier
/**
 * Exercises DatabaseMetaData.getColumns() with table/column name patterns
 * covering SQL wildcards (%, _) and escaped literals (\_), checking the
 * number of matching rows and, for the first two rows, name and ordinal.
 */
@Test public void testMetaDataGetColumns() throws SQLException {
// Key: {tableNamePattern, columnNamePattern (null = all columns)};
// value: expected number of matching column rows.
Map tests=new HashMap();
tests.put(new String[]{"testhivejdbcdriver\\_table",null},2);
tests.put(new String[]{"testhivejdbc%",null},8);
tests.put(new String[]{"testhiveJDBC%",null},8);
tests.put(new String[]{"%jdbcdriver\\_table",null},2);
tests.put(new String[]{"%jdbcdriver\\_table%","under\\_col"},1);
tests.put(new String[]{"%jdbcdriver\\_table%","under\\_co_"},1);
tests.put(new String[]{"%jdbcdriver\\_table%","under_col"},1);
tests.put(new String[]{"%jdbcdriver\\_table%","und%"},1);
tests.put(new String[]{"%jdbcdriver\\_table%","%"},2);
tests.put(new String[]{"%jdbcdriver\\_table%","_%"},2);
for ( String[] checkPattern : tests.keySet()) {
ResultSet rs=con.getMetaData().getColumns(null,"default",checkPattern[0],checkPattern[1]);
// First metadata column of a getColumns() result must be TABLE_CAT per JDBC.
ResultSetMetaData rsmd=rs.getMetaData();
assertEquals("TABLE_CAT",rsmd.getColumnName(1));
int cnt=0;
while (rs.next()) {
String columnname=rs.getString("COLUMN_NAME");
int ordinalPos=rs.getInt("ORDINAL_POSITION");
// Only the first two rows are checked in detail; later rows just count.
// NOTE(review): the ordinal-position asserts below pass (actual, expected)
// in reversed order — harmless for equality but garbles failure messages.
switch (cnt) {
case 0:
assertEquals("Wrong column name found","under_col",columnname);
assertEquals("Wrong ordinal position found",ordinalPos,1);
break;
case 1:
assertEquals("Wrong column name found","value",columnname);
assertEquals("Wrong ordinal position found",ordinalPos,2);
break;
default :
break;
}
cnt++;
}
rs.close();
// Total row count must match the expectation for this pattern pair.
assertEquals("Found less columns then we test for.",tests.get(checkPattern).intValue(),cnt);
}
}
APIUtilityVerifier IterativeVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Cross-checks ResultSetMetaData of a query over every Hive data type against
 * DatabaseMetaData.getColumns() for the same table: name, JDBC type, type
 * name, display size, precision and scale per column. The colRS cursor walks
 * the table's physical columns in order, so the interleaving of colRS.next()
 * calls with the meta assertions below is load-bearing — do not reorder.
 */
@Test public void testResultSetMetaData() throws SQLException {
Statement stmt=con.createStatement();
ResultSet res=stmt.executeQuery("select c1, c2, c3, c4, c5 as a, c6, c7, c8, c9, c10, c11, c12, " + "c1*2, sentences(null, null, null) as b, c17, c18, c20, c21, c22, c23 from " + dataTypeTableName + " limit 1");
ResultSetMetaData meta=res.getMetaData();
ResultSet colRS=con.getMetaData().getColumns(null,null,dataTypeTableName.toLowerCase(),null);
assertEquals(20,meta.getColumnCount());
// --- column 1: c1 (int) ---
assertTrue(colRS.next());
assertEquals("c1",meta.getColumnName(1));
assertEquals(Types.INTEGER,meta.getColumnType(1));
assertEquals("int",meta.getColumnTypeName(1));
assertEquals(11,meta.getColumnDisplaySize(1));
assertEquals(10,meta.getPrecision(1));
assertEquals(0,meta.getScale(1));
assertEquals("c1",colRS.getString("COLUMN_NAME"));
assertEquals(Types.INTEGER,colRS.getInt("DATA_TYPE"));
assertEquals("int",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(1),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(1),colRS.getInt("DECIMAL_DIGITS"));
// --- column 2: c2 (boolean) ---
assertTrue(colRS.next());
assertEquals("c2",meta.getColumnName(2));
assertEquals("boolean",meta.getColumnTypeName(2));
assertEquals(Types.BOOLEAN,meta.getColumnType(2));
assertEquals(1,meta.getColumnDisplaySize(2));
assertEquals(1,meta.getPrecision(2));
assertEquals(0,meta.getScale(2));
assertEquals("c2",colRS.getString("COLUMN_NAME"));
assertEquals(Types.BOOLEAN,colRS.getInt("DATA_TYPE"));
assertEquals("boolean",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getScale(2),colRS.getInt("DECIMAL_DIGITS"));
// --- column 3: c3 (double) ---
assertTrue(colRS.next());
assertEquals("c3",meta.getColumnName(3));
assertEquals(Types.DOUBLE,meta.getColumnType(3));
assertEquals("double",meta.getColumnTypeName(3));
assertEquals(25,meta.getColumnDisplaySize(3));
assertEquals(15,meta.getPrecision(3));
assertEquals(15,meta.getScale(3));
assertEquals("c3",colRS.getString("COLUMN_NAME"));
assertEquals(Types.DOUBLE,colRS.getInt("DATA_TYPE"));
assertEquals("double",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(3),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(3),colRS.getInt("DECIMAL_DIGITS"));
// --- column 4: c4 (string, unbounded precision) ---
assertTrue(colRS.next());
assertEquals("c4",meta.getColumnName(4));
assertEquals(Types.VARCHAR,meta.getColumnType(4));
assertEquals("string",meta.getColumnTypeName(4));
assertEquals(Integer.MAX_VALUE,meta.getColumnDisplaySize(4));
assertEquals(Integer.MAX_VALUE,meta.getPrecision(4));
assertEquals(0,meta.getScale(4));
assertEquals("c4",colRS.getString("COLUMN_NAME"));
assertEquals(Types.VARCHAR,colRS.getInt("DATA_TYPE"));
assertEquals("string",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(4),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(4),colRS.getInt("DECIMAL_DIGITS"));
// --- column 5: c5 aliased "a" (array) — alias in meta, real name in colRS ---
assertTrue(colRS.next());
assertEquals("a",meta.getColumnName(5));
assertEquals(Types.ARRAY,meta.getColumnType(5));
assertEquals("array",meta.getColumnTypeName(5));
assertEquals(Integer.MAX_VALUE,meta.getColumnDisplaySize(5));
assertEquals(Integer.MAX_VALUE,meta.getPrecision(5));
assertEquals(0,meta.getScale(5));
assertEquals("c5",colRS.getString("COLUMN_NAME"));
assertEquals(Types.ARRAY,colRS.getInt("DATA_TYPE"));
assertEquals("array",colRS.getString("TYPE_NAME").toLowerCase());
// --- column 6: c6 (map) ---
assertTrue(colRS.next());
assertEquals("c6",meta.getColumnName(6));
assertEquals(Types.JAVA_OBJECT,meta.getColumnType(6));
assertEquals("map",meta.getColumnTypeName(6));
assertEquals(Integer.MAX_VALUE,meta.getColumnDisplaySize(6));
assertEquals(Integer.MAX_VALUE,meta.getPrecision(6));
assertEquals(0,meta.getScale(6));
assertEquals("c6",colRS.getString("COLUMN_NAME"));
assertEquals(Types.JAVA_OBJECT,colRS.getInt("DATA_TYPE"));
assertEquals("map",colRS.getString("TYPE_NAME").toLowerCase());
// --- column 7: c7 (map) ---
assertTrue(colRS.next());
assertEquals("c7",meta.getColumnName(7));
assertEquals(Types.JAVA_OBJECT,meta.getColumnType(7));
assertEquals("map",meta.getColumnTypeName(7));
assertEquals(Integer.MAX_VALUE,meta.getColumnDisplaySize(7));
assertEquals(Integer.MAX_VALUE,meta.getPrecision(7));
assertEquals(0,meta.getScale(7));
assertEquals("c7",colRS.getString("COLUMN_NAME"));
assertEquals(Types.JAVA_OBJECT,colRS.getInt("DATA_TYPE"));
assertEquals("map",colRS.getString("TYPE_NAME").toLowerCase());
// --- column 8: c8 (struct) ---
assertTrue(colRS.next());
assertEquals("c8",meta.getColumnName(8));
assertEquals(Types.STRUCT,meta.getColumnType(8));
assertEquals("struct",meta.getColumnTypeName(8));
assertEquals(Integer.MAX_VALUE,meta.getColumnDisplaySize(8));
assertEquals(Integer.MAX_VALUE,meta.getPrecision(8));
assertEquals(0,meta.getScale(8));
assertEquals("c8",colRS.getString("COLUMN_NAME"));
assertEquals(Types.STRUCT,colRS.getInt("DATA_TYPE"));
assertEquals("struct",colRS.getString("TYPE_NAME").toLowerCase());
// --- column 9: c9 (tinyint) ---
assertTrue(colRS.next());
assertEquals("c9",meta.getColumnName(9));
assertEquals(Types.TINYINT,meta.getColumnType(9));
assertEquals("tinyint",meta.getColumnTypeName(9));
assertEquals(4,meta.getColumnDisplaySize(9));
assertEquals(3,meta.getPrecision(9));
assertEquals(0,meta.getScale(9));
assertEquals("c9",colRS.getString("COLUMN_NAME"));
assertEquals(Types.TINYINT,colRS.getInt("DATA_TYPE"));
assertEquals("tinyint",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(9),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(9),colRS.getInt("DECIMAL_DIGITS"));
// --- column 10: c10 (smallint) ---
assertTrue(colRS.next());
assertEquals("c10",meta.getColumnName(10));
assertEquals(Types.SMALLINT,meta.getColumnType(10));
assertEquals("smallint",meta.getColumnTypeName(10));
assertEquals(6,meta.getColumnDisplaySize(10));
assertEquals(5,meta.getPrecision(10));
assertEquals(0,meta.getScale(10));
assertEquals("c10",colRS.getString("COLUMN_NAME"));
assertEquals(Types.SMALLINT,colRS.getInt("DATA_TYPE"));
assertEquals("smallint",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(10),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(10),colRS.getInt("DECIMAL_DIGITS"));
// --- column 11: c11 (float) ---
assertTrue(colRS.next());
assertEquals("c11",meta.getColumnName(11));
assertEquals(Types.FLOAT,meta.getColumnType(11));
assertEquals("float",meta.getColumnTypeName(11));
assertEquals(24,meta.getColumnDisplaySize(11));
assertEquals(7,meta.getPrecision(11));
assertEquals(7,meta.getScale(11));
assertEquals("c11",colRS.getString("COLUMN_NAME"));
assertEquals(Types.FLOAT,colRS.getInt("DATA_TYPE"));
assertEquals("float",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(11),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(11),colRS.getInt("DECIMAL_DIGITS"));
// --- column 12: c12 (bigint) ---
assertTrue(colRS.next());
assertEquals("c12",meta.getColumnName(12));
assertEquals(Types.BIGINT,meta.getColumnType(12));
assertEquals("bigint",meta.getColumnTypeName(12));
assertEquals(20,meta.getColumnDisplaySize(12));
assertEquals(19,meta.getPrecision(12));
assertEquals(0,meta.getScale(12));
assertEquals("c12",colRS.getString("COLUMN_NAME"));
assertEquals(Types.BIGINT,colRS.getInt("DATA_TYPE"));
assertEquals("bigint",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(12),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(12),colRS.getInt("DECIMAL_DIGITS"));
// --- columns 13-14 are derived expressions (c1*2, sentences(...)), so only
// the query metadata is checked; colRS is not advanced for them. ---
assertEquals("c12_1",meta.getColumnName(13));
assertEquals(Types.INTEGER,meta.getColumnType(13));
assertEquals("int",meta.getColumnTypeName(13));
assertEquals(11,meta.getColumnDisplaySize(13));
assertEquals(10,meta.getPrecision(13));
assertEquals(0,meta.getScale(13));
assertEquals("b",meta.getColumnName(14));
assertEquals(Types.ARRAY,meta.getColumnType(14));
assertEquals("array",meta.getColumnTypeName(14));
assertEquals(Integer.MAX_VALUE,meta.getColumnDisplaySize(14));
assertEquals(Integer.MAX_VALUE,meta.getPrecision(14));
assertEquals(0,meta.getScale(14));
// Advance colRS past table columns c13-c16 (not selected individually) and
// land on c17.
assertTrue(colRS.next());
assertTrue(colRS.next());
assertTrue(colRS.next());
assertTrue(colRS.next());
assertTrue(colRS.next());
// --- result column 15: c17 (timestamp) ---
assertEquals("c17",meta.getColumnName(15));
assertEquals(Types.TIMESTAMP,meta.getColumnType(15));
assertEquals("timestamp",meta.getColumnTypeName(15));
assertEquals(29,meta.getColumnDisplaySize(15));
assertEquals(29,meta.getPrecision(15));
assertEquals(9,meta.getScale(15));
assertEquals("c17",colRS.getString("COLUMN_NAME"));
assertEquals(Types.TIMESTAMP,colRS.getInt("DATA_TYPE"));
assertEquals("timestamp",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(15),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(15),colRS.getInt("DECIMAL_DIGITS"));
// --- result column 16: c18 (decimal(16,7)) ---
assertTrue(colRS.next());
assertEquals("c18",meta.getColumnName(16));
assertEquals(Types.DECIMAL,meta.getColumnType(16));
assertEquals("decimal",meta.getColumnTypeName(16));
assertEquals(18,meta.getColumnDisplaySize(16));
assertEquals(16,meta.getPrecision(16));
assertEquals(7,meta.getScale(16));
assertEquals("c18",colRS.getString("COLUMN_NAME"));
assertEquals(Types.DECIMAL,colRS.getInt("DATA_TYPE"));
assertEquals("decimal",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(16),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(16),colRS.getInt("DECIMAL_DIGITS"));
// Skip table column c19 and land on c20.
assertTrue(colRS.next());
assertTrue(colRS.next());
// --- result column 17: c20 (date) ---
assertEquals("c20",meta.getColumnName(17));
assertEquals(Types.DATE,meta.getColumnType(17));
assertEquals("date",meta.getColumnTypeName(17));
assertEquals(10,meta.getColumnDisplaySize(17));
assertEquals(10,meta.getPrecision(17));
assertEquals(0,meta.getScale(17));
assertEquals("c20",colRS.getString("COLUMN_NAME"));
assertEquals(Types.DATE,colRS.getInt("DATA_TYPE"));
assertEquals("date",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(17),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(17),colRS.getInt("DECIMAL_DIGITS"));
// --- result column 18: c21 (varchar(20)) ---
assertTrue(colRS.next());
assertEquals("c21",meta.getColumnName(18));
assertEquals(Types.VARCHAR,meta.getColumnType(18));
assertEquals("varchar",meta.getColumnTypeName(18));
assertEquals(20,meta.getColumnDisplaySize(18));
assertEquals(20,meta.getPrecision(18));
assertEquals(0,meta.getScale(18));
assertEquals("c21",colRS.getString("COLUMN_NAME"));
assertEquals(Types.VARCHAR,colRS.getInt("DATA_TYPE"));
assertEquals("varchar",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(18),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(18),colRS.getInt("DECIMAL_DIGITS"));
// --- result columns 19-20: c22 (char(15)) and c23 (binary). The meta checks
// for both precede the colRS checks for c22; colRS is already on c22 here. ---
assertTrue(colRS.next());
assertEquals("c22",meta.getColumnName(19));
assertEquals(Types.CHAR,meta.getColumnType(19));
assertEquals("char",meta.getColumnTypeName(19));
assertEquals(15,meta.getColumnDisplaySize(19));
assertEquals(15,meta.getPrecision(19));
assertEquals(0,meta.getScale(19));
assertEquals("c23",meta.getColumnName(20));
assertEquals(Types.BINARY,meta.getColumnType(20));
assertEquals("binary",meta.getColumnTypeName(20));
assertEquals(Integer.MAX_VALUE,meta.getColumnDisplaySize(20));
assertEquals(Integer.MAX_VALUE,meta.getPrecision(20));
assertEquals(0,meta.getScale(20));
assertEquals("c22",colRS.getString("COLUMN_NAME"));
assertEquals(Types.CHAR,colRS.getInt("DATA_TYPE"));
assertEquals("char",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(19),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(19),colRS.getInt("DECIMAL_DIGITS"));
// Every result column: not auto-increment, not currency, nullable.
for (int i=1; i <= meta.getColumnCount(); i++) {
assertFalse(meta.isAutoIncrement(i));
assertFalse(meta.isCurrency(i));
assertEquals(ResultSetMetaData.columnNullable,meta.isNullable(i));
}
}
APIUtilityVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * "describe &lt;table&gt;" must list exactly two rows: under_col (int) and
 * value (string), with no further rows.
 */
@Test public void testDescribeTable() throws SQLException {
  Statement stmt=con.createStatement();
  assertNotNull("Statement is null",stmt);
  ResultSet res=stmt.executeQuery("describe " + tableName);
  res.next();
  String firstName=res.getString(1);
  String firstType=res.getString(2);
  assertEquals("Column name 'under_col' not found","under_col",firstName);
  assertEquals("Column type 'under_col' for column under_col not found","int",firstType);
  res.next();
  String secondName=res.getString(1);
  String secondType=res.getString(2);
  assertEquals("Column name 'value' not found","value",secondName);
  assertEquals("Column type 'string' for column key not found","string",secondType);
  assertFalse("More results found than expected",res.next());
}
APIUtilityVerifier IterativeVerifier BooleanVerifier EqualityVerifier NullVerifier PublicFieldVerifier HybridVerifier
/**
 * Reads three fixture rows (ordered by c1) covering every Hive data type and
 * checks the JDBC getter conversions: row 1 is all-null, row 2 holds
 * empty/negative sentinel values, row 3 is fully populated.
 */
@Test public void testDataTypes() throws Exception {
Statement stmt=con.createStatement();
ResultSet res=stmt.executeQuery("select * from " + dataTypeTableName + " order by c1");
ResultSetMetaData meta=res.getMetaData();
// --- row 1: every column is null; primitive getters return their zero value ---
assertTrue(res.next());
// NOTE(review): loop bound "i < meta.getColumnCount()" skips the last
// column — possibly an off-by-one (<=) — TODO confirm before changing.
for (int i=1; i < meta.getColumnCount(); i++) {
assertNull("Column " + i + " should be null",res.getObject(i));
}
assertEquals(0,res.getInt(1));
assertEquals(false,res.getBoolean(2));
assertEquals(0d,res.getDouble(3),floatCompareDelta);
assertEquals(null,res.getString(4));
assertEquals(null,res.getString(5));
assertEquals(null,res.getString(6));
assertEquals(null,res.getString(7));
assertEquals(null,res.getString(8));
assertEquals(0,res.getByte(9));
assertEquals(0,res.getShort(10));
assertEquals(0f,res.getFloat(11),floatCompareDelta);
assertEquals(0L,res.getLong(12));
assertEquals(null,res.getString(13));
assertEquals(null,res.getString(14));
assertEquals(null,res.getString(15));
assertEquals(null,res.getString(16));
assertEquals(null,res.getString(17));
assertEquals(null,res.getString(18));
assertEquals(null,res.getString(19));
assertEquals(null,res.getString(20));
assertEquals(null,res.getDate(20));
assertEquals(null,res.getString(21));
assertEquals(null,res.getString(22));
// --- row 2: negative scalars, empty complex values, null temporal/binary ---
assertTrue(res.next());
assertEquals(-1,res.getInt(1));
assertEquals(false,res.getBoolean(2));
assertEquals(-1.1d,res.getDouble(3),floatCompareDelta);
assertEquals("",res.getString(4));
assertEquals("[]",res.getString(5));
assertEquals("{}",res.getString(6));
assertEquals("{}",res.getString(7));
assertEquals("{\"r\":null,\"s\":null,\"t\":null}",res.getString(8));
assertEquals(-1,res.getByte(9));
assertEquals(-1,res.getShort(10));
assertEquals(-1.0f,res.getFloat(11),floatCompareDelta);
assertEquals(-1,res.getLong(12));
assertEquals("[]",res.getString(13));
assertEquals("{}",res.getString(14));
assertEquals("{\"r\":null,\"s\":null}",res.getString(15));
assertEquals("[]",res.getString(16));
assertEquals(null,res.getString(17));
assertEquals(null,res.getTimestamp(17));
assertEquals(null,res.getBigDecimal(18));
assertEquals(null,res.getString(19));
assertEquals(null,res.getString(20));
assertEquals(null,res.getDate(20));
assertEquals(null,res.getString(21));
assertEquals(null,res.getString(22));
assertEquals(null,res.getString(23));
// --- row 3: fully populated; complex types render as JSON-like strings ---
assertTrue(res.next());
assertEquals(1,res.getInt(1));
assertEquals(true,res.getBoolean(2));
assertEquals(1.1d,res.getDouble(3),floatCompareDelta);
assertEquals("1",res.getString(4));
assertEquals("[1,2]",res.getString(5));
assertEquals("{1:\"x\",2:\"y\"}",res.getString(6));
assertEquals("{\"k\":\"v\"}",res.getString(7));
assertEquals("{\"r\":\"a\",\"s\":9,\"t\":2.2}",res.getString(8));
assertEquals(1,res.getByte(9));
assertEquals(1,res.getShort(10));
assertEquals(1.0f,res.getFloat(11),floatCompareDelta);
assertEquals(1,res.getLong(12));
assertEquals("[[\"a\",\"b\"],[\"c\",\"d\"]]",res.getString(13));
assertEquals("{1:{11:12,13:14},2:{21:22}}",res.getString(14));
assertEquals("{\"r\":1,\"s\":{\"a\":2,\"b\":\"x\"}}",res.getString(15));
assertEquals("[{\"m\":{},\"n\":1},{\"m\":{\"a\":\"b\",\"c\":\"d\"},\"n\":2}]",res.getString(16));
assertEquals("2012-04-22 09:00:00.123456789",res.getString(17));
assertEquals("2012-04-22 09:00:00.123456789",res.getTimestamp(17).toString());
assertEquals("123456789.1234560",res.getBigDecimal(18).toString());
assertEquals("abcd",res.getString(19));
assertEquals("2013-01-01",res.getString(20));
assertEquals("2013-01-01",res.getDate(20).toString());
assertEquals("abc123",res.getString(21));
// char(15) column is padded to its full width.
assertEquals("abc123         ",res.getString(22));
// Binary column (c23) is read back via its stream and compared byte-wise.
byte[] bytes="X'01FF'".getBytes("UTF-8");
InputStream resultSetInputStream=res.getBinaryStream(23);
int len=bytes.length;
byte[] b=new byte[len];
resultSetInputStream.read(b,0,len);
for (int i=0; i < len; i++) {
assertEquals(bytes[i],b[i]);
}
// Numeric/string columns coerce to boolean (non-zero / "1" is true).
assertEquals(true,res.getBoolean(1));
assertEquals(true,res.getBoolean(4));
// Case sensitivity: only the string column (4) is case-sensitive.
assertFalse(meta.isCaseSensitive(1));
assertFalse(meta.isCaseSensitive(2));
assertFalse(meta.isCaseSensitive(3));
assertTrue(meta.isCaseSensitive(4));
assertFalse(res.next());
}
InternalCallVerifier EqualityVerifier
/**
 * Parses each HTTP-mode JDBC URL fixture and verifies host, port, db name,
 * and the transportMode/httpPath session variables.
 */
@Test public void testParseUrlHttpMode() throws SQLException, JdbcUriParseException, ZooKeeperHiveClientException {
  new HiveDriver();
  for ( String[] testValues : HTTP_URL_PROPERTIES) {
    JdbcConnectionParams params=Utils.parseURL(testValues[0],new Properties());
    // assertEquals takes (expected, actual); the original had every call
    // reversed, producing misleading failure messages.
    assertEquals(testValues[1], params.getHost());
    assertEquals(Integer.parseInt(testValues[2]), params.getPort());
    assertEquals(testValues[3], params.getDbName());
    assertEquals(testValues[4], params.getSessionVars().get("transportMode"));
    assertEquals(testValues[5], params.getSessionVars().get("httpPath"));
  }
}
APIUtilityVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * test getProcedureColumns(): Hive has no stored procedures, so the result
 * set must be empty while still exposing the 20 JDBC-mandated columns.
 * @throws SQLException
 */
@Test public void testProcCols() throws SQLException {
  DatabaseMetaData dbmd=con.getMetaData();
  assertNotNull(dbmd);
  ResultSet res=dbmd.getProcedureColumns(null,null,null,null);
  ResultSetMetaData md=res.getMetaData();
  // assertEquals takes (expected, actual); the original had them reversed.
  assertEquals(20, md.getColumnCount());
  assertFalse(res.next());
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Verify selecting using builtin UDFs: bin(c12) appears as a second column
 * and returns the binary rendering of the value.
 * @throws SQLException
 */
@Test public void testBuiltInUDFCol() throws SQLException {
  Statement stmt=con.createStatement();
  ResultSet res=stmt.executeQuery("select c12, bin(c12) from " + dataTypeTableName + " where c1=1");
  ResultSetMetaData md=res.getMetaData();
  // assertEquals takes (expected, actual); the original had them reversed.
  assertEquals(2, md.getColumnCount());
  // NOTE(review): expecting label "c1" for the bin(c12) column is preserved
  // from the original — confirm against server labeling behavior.
  assertEquals("c1", md.getColumnLabel(2));
  assertTrue(res.next());
  assertEquals(1, res.getLong(1));
  assertEquals("1", res.getString(2));
  res.close();
}
UtilityVerifier EqualityVerifier HybridVerifier
/**
 * Negative Test for cursor repositioning to start of resultset:
 * beforeFirst() on a TYPE_FORWARD_ONLY result set must raise SQLException
 * with the documented message.
 * @throws Exception
 */
@Test public void testFetchFirstError() throws Exception {
  Statement stmt=con.createStatement();
  ResultSet res=stmt.executeQuery("select * from " + tableName);
  boolean threw=false;
  try {
    res.beforeFirst();
  }
  catch ( SQLException e) {
    threw=true;
    assertEquals("Method not supported for TYPE_FORWARD_ONLY resultset",e.getMessage());
  }
  if (!threw) {
    fail("beforeFirst() should fail for normal resultset");
  }
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * getSchemas() returns a two-column result (TABLE_SCHEM, TABLE_CATALOG)
 * listing exactly the "default" and "testdb" schemas, in that order.
 */
@Test public void testMetaDataGetSchemas() throws SQLException {
  ResultSet schemas=con.getMetaData().getSchemas();
  ResultSetMetaData schemaMeta=schemas.getMetaData();
  assertEquals(2,schemaMeta.getColumnCount());
  assertEquals("TABLE_SCHEM",schemaMeta.getColumnName(1));
  assertEquals("TABLE_CATALOG",schemaMeta.getColumnName(2));
  assertTrue(schemas.next());
  assertEquals("default",schemas.getString(1));
  assertTrue(schemas.next());
  assertEquals("testdb",schemas.getString(1));
  assertFalse(schemas.next());
  schemas.close();
}
APIUtilityVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * test testProccedures(): getProcedures() must return an empty result set
 * that still exposes the 9 JDBC-mandated columns.
 * @throws SQLException
 */
@Test public void testProccedures() throws SQLException {
  DatabaseMetaData dbmd=con.getMetaData();
  assertNotNull(dbmd);
  ResultSet res=dbmd.getProcedures(null,null,null);
  ResultSetMetaData md=res.getMetaData();
  // assertEquals takes (expected, actual); the original had them reversed.
  assertEquals(9, md.getColumnCount());
  assertFalse(res.next());
}
APIUtilityVerifier IterativeVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Checks metadata and values for interval_year_month and interval_day_time
 * result columns, including NULL handling for rows with NULL c17.
 * @throws Exception
 */
@Test public void testIntervalTypes() throws Exception {
  Statement stmt = con.createStatement();
  ResultSet res = stmt.executeQuery("select case when c17 is null then null else interval '1' year end as col1," + " c17 - c17 as col2 from " + dataTypeTableName + " order by col1");
  ResultSetMetaData meta = res.getMetaData();
  // interval year-month column metadata
  assertEquals("col1", meta.getColumnLabel(1));
  assertEquals(java.sql.Types.OTHER, meta.getColumnType(1));
  assertEquals("interval_year_month", meta.getColumnTypeName(1));
  assertEquals(11, meta.getColumnDisplaySize(1));
  assertEquals(11, meta.getPrecision(1));
  assertEquals(0, meta.getScale(1));
  assertEquals(HiveIntervalYearMonth.class.getName(), meta.getColumnClassName(1));
  // interval day-time column metadata
  assertEquals("col2", meta.getColumnLabel(2));
  assertEquals(java.sql.Types.OTHER, meta.getColumnType(2));
  assertEquals("interval_day_time", meta.getColumnTypeName(2));
  assertEquals(29, meta.getColumnDisplaySize(2));
  assertEquals(29, meta.getPrecision(2));
  assertEquals(0, meta.getScale(2));
  assertEquals(HiveIntervalDayTime.class.getName(), meta.getColumnClassName(2));
  // First two result rows come from NULL c17 values; every column must be null.
  // BUGFIX: loops previously used i < getColumnCount(), which never checked
  // the last column; use <= so all columns are verified.
  assertTrue(res.next());
  for (int i = 1; i <= meta.getColumnCount(); i++) {
    assertNull("Column " + i + " should be null", res.getObject(i));
  }
  assertTrue(res.next());
  for (int i = 1; i <= meta.getColumnCount(); i++) {
    assertNull("Column " + i + " should be null", res.getObject(i));
  }
  // Third row has a non-null c17: a 1-year interval and a zero day-time interval.
  assertTrue(res.next());
  assertEquals("1-0", res.getString(1));
  assertEquals(1, ((HiveIntervalYearMonth) res.getObject(1)).getYears());
  assertEquals("0 00:00:00.000000000", res.getString(2));
  assertEquals(0, ((HiveIntervalDayTime) res.getObject(2)).getDays());
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * getTables() filtered to EXTERNAL_TABLE must return exactly the one
 * external test table, with the documented column layout.
 * @throws SQLException
 */
@Test public void testMetaDataGetExternalTables() throws SQLException {
  Statement stmt = con.createStatement();
  stmt.execute("set " + HiveConf.ConfVars.HIVE_SERVER2_TABLE_TYPE_MAPPING.varname + " = " + TableTypeMappings.HIVE.toString());
  stmt.close();
  ResultSet rs = con.getMetaData().getTables(null, null, null, new String[]{TableType.EXTERNAL_TABLE.toString()});
  ResultSetMetaData resMeta = rs.getMetaData();
  assertEquals(10, resMeta.getColumnCount());
  assertEquals("TABLE_CAT", resMeta.getColumnName(1));
  assertEquals("TABLE_SCHEM", resMeta.getColumnName(2));
  assertEquals("TABLE_NAME", resMeta.getColumnName(3));
  assertEquals("TABLE_TYPE", resMeta.getColumnName(4));
  assertEquals("REMARKS", resMeta.getColumnName(5));
  // BUGFIX: next()'s return value was ignored; assert it so a missing row
  // fails here instead of as a confusing getString() error.
  assertTrue("Expected one external table row", rs.next());
  // JUnit convention: expected value first (fixes swapped args).
  assertEquals("default", rs.getString("TABLE_SCHEM"));
  assertEquals(externalTable.toLowerCase(), rs.getString("TABLE_NAME"));
  String resultTableComment = rs.getString("REMARKS");
  assertTrue("Missing comment on the table.", resultTableComment.length() > 0);
  String tableType = rs.getString("TABLE_TYPE");
  assertEquals(TableType.EXTERNAL_TABLE.toString(), tableType);
  assertFalse("Unexpected table", rs.next());
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Verify selecting named expression columns
 * @throws SQLException
 */
@Test public void testExprCol() throws SQLException {
  Statement stmt = con.createStatement();
  ResultSet res = stmt.executeQuery("select c1+1 as col1, length(c4) as len from " + dataTypeTableName + " where c1=1");
  ResultSetMetaData md = res.getMetaData();
  // JUnit convention: expected value first (fixes swapped args).
  assertEquals(2, md.getColumnCount());
  assertEquals("col1", md.getColumnLabel(1));
  assertEquals("len", md.getColumnLabel(2));
  assertTrue(res.next());
  assertEquals(2, res.getInt(1));
  assertEquals(1, res.getInt(2));
  res.close();
}
UtilityVerifier EqualityVerifier HybridVerifier
/**
 * Verify that common query errors surface the proper SQLSTATE codes.
 * @throws SQLException
 */
@Test public void testErrorDiag() throws SQLException {
  Statement stmt = con.createStatement();
  // Syntax error.
  assertQueryFailsWithSqlState(stmt, "select from " + dataTypeTableName, "42000");
  // Unknown table.
  assertQueryFailsWithSqlState(stmt, "select * from nonTable", "42S02");
  // Unknown column.
  assertQueryFailsWithSqlState(stmt, "select zzzz from " + dataTypeTableName, "42000");
}

/** Runs a query expected to fail and asserts the resulting SQLSTATE. */
private void assertQueryFailsWithSqlState(Statement stmt, String sql, String expectedSqlState) {
  try {
    stmt.executeQuery(sql);
    fail("SQLException is expected");
  } catch (SQLException e) {
    assertEquals(expectedSqlState, e.getSQLState());
  }
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/** Hive exposes no JDBC catalogs: one TABLE_CAT column and zero rows. */
@Test public void testMetaDataGetCatalogs() throws SQLException {
  ResultSet catalogs = con.getMetaData().getCatalogs();
  ResultSetMetaData catalogsMeta = catalogs.getMetaData();
  assertEquals(1, catalogsMeta.getColumnCount());
  assertEquals("TABLE_CAT", catalogsMeta.getColumnName(1));
  assertFalse(catalogs.next());
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/** Sanity checks for the connection-level DatabaseMetaData contract. */
@Test public void testDatabaseMetaData() throws SQLException {
  DatabaseMetaData dbMeta = con.getMetaData();
  // Product identity and version format.
  assertEquals("Apache Hive", dbMeta.getDatabaseProductName());
  assertEquals(HiveVersionInfo.getVersion(), dbMeta.getDatabaseProductVersion());
  assertEquals(System.getProperty("hive.version"), dbMeta.getDatabaseProductVersion());
  assertTrue("verifying hive version pattern. got " + dbMeta.getDatabaseProductVersion(),
      Pattern.matches("\\d+\\.\\d+\\.\\d+.*", dbMeta.getDatabaseProductVersion()));
  assertEquals(DatabaseMetaData.sqlStateSQL99, dbMeta.getSQLStateType());
  // Capability flags.
  assertFalse(dbMeta.supportsCatalogsInTableDefinitions());
  assertTrue(dbMeta.supportsSchemasInTableDefinitions());
  assertTrue(dbMeta.supportsSchemasInDataManipulation());
  assertFalse(dbMeta.supportsMultipleResultSets());
  assertFalse(dbMeta.supportsStoredProcedures());
  assertTrue(dbMeta.supportsAlterTableWithAddColumn());
  // Version numbers must be resolvable.
  assertTrue(dbMeta.getDatabaseMajorVersion() > -1);
  assertTrue(dbMeta.getDatabaseMinorVersion() > -1);
}
APIUtilityVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * test getPrimaryKeys()
 * Must return an empty result set with the JDBC-mandated 6-column layout.
 * @throws SQLException
 */
@Test public void testPrimaryKeys() throws SQLException {
  DatabaseMetaData dbmd = con.getMetaData();
  assertNotNull(dbmd);
  ResultSet res = dbmd.getPrimaryKeys(null, null, null);
  ResultSetMetaData md = res.getMetaData();
  // JUnit convention: expected value first (fixes swapped args).
  assertEquals(6, md.getColumnCount());
  assertFalse(res.next());
}
APIUtilityVerifier UtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Verify a date parameter bound via PreparedStatement.setDate() matches the
 * stored c20 value.
 * @throws Exception
 */
@Test public void testPrepareSetDate() throws Exception {
  String sql = "select * from " + dataTypeTableName + " where c20 = ?";
  // try-with-resources replaces the old catch(Exception){printStackTrace; fail}
  // anti-pattern and guarantees statement/result-set cleanup, matching the
  // style of testPrepareSetTimestamp.
  try (PreparedStatement ps = con.prepareStatement(sql)) {
    java.sql.Date dtValue = java.sql.Date.valueOf("2013-01-01");
    ps.setDate(1, dtValue);
    try (ResultSet res = ps.executeQuery()) {
      assertTrue(res.next());
      assertEquals("2013-01-01", res.getString(20));
    }
  }
}
UtilityVerifier EqualityVerifier HybridVerifier
/**
 * Column lookup by name on a ResultSet must be case-insensitive, both for a
 * plain column reference and for an upper-case alias.
 * @throws SQLException
 */
@Test public void testResultSetColumnNameCaseInsensitive() throws SQLException {
  Statement stmt = con.createStatement();
  // Plain column reference.
  verifyCaseInsensitiveFindColumn(stmt.executeQuery("select c1 from " + dataTypeTableName + " limit 1"));
  // Upper-case alias.
  verifyCaseInsensitiveFindColumn(stmt.executeQuery("select c1 C1 from " + dataTypeTableName + " limit 1"));
}

/** Asserts the single-row result set resolves both "c1" and "C1" by name. */
private void verifyCaseInsensitiveFindColumn(ResultSet res) {
  try {
    int count = 0;
    while (res.next()) {
      res.findColumn("c1");
      res.findColumn("C1");
      count++;
    }
    assertEquals(1, count);
  } catch (Exception e) {
    String msg = "Unexpected exception: " + e;
    LOG.info(msg, e);
    fail(msg);
  }
}
APIUtilityVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Grants SELECT to hive_test_user and checks the single SHOW GRANT output row.
 * Column order follows the SHOW GRANT output schema asserted below.
 */
@Test public void testShowGrant() throws SQLException {
  Statement statement = con.createStatement();
  statement.execute("grant select on table " + dataTypeTableName + " to user hive_test_user");
  statement.execute("show grant user hive_test_user on table " + dataTypeTableName);
  ResultSet grants = statement.getResultSet();
  assertTrue(grants.next());
  assertEquals("default", grants.getString(1));
  assertEquals(dataTypeTableName, grants.getString(2));
  assertEquals("", grants.getString(3));
  assertEquals("", grants.getString(4));
  assertEquals("hive_test_user", grants.getString(5));
  assertEquals("USER", grants.getString(6));
  assertEquals("SELECT", grants.getString(7));
  assertEquals(false, grants.getBoolean(8));
  assertEquals(-1, grants.getLong(9));
  assertNotNull(grants.getString(10));
  assertFalse(grants.next());
  grants.close();
}
InternalCallVerifier EqualityVerifier
/** Each URL in URL_PROPERTIES must expose HOST/PORT/DBNAME driver properties. */
@Test public void testDriverProperties() throws SQLException {
  HiveDriver hiveDriver = new HiveDriver();
  for (String[] expected : URL_PROPERTIES) {
    DriverPropertyInfo[] propertyInfo = hiveDriver.getPropertyInfo(expected[0], null);
    assertEquals("unexpected DriverPropertyInfo array size", 3, propertyInfo.length);
    assertDpi(propertyInfo[0], "HOST", expected[1]);
    assertDpi(propertyInfo[1], "PORT", expected[2]);
    assertDpi(propertyInfo[2], "DBNAME", expected[3]);
  }
}
APIUtilityVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * test getImportedKeys()
 * Must return an empty result set with the JDBC-mandated 14-column layout.
 * @throws SQLException
 */
@Test public void testImportedKeys() throws SQLException {
  DatabaseMetaData dbmd = con.getMetaData();
  assertNotNull(dbmd);
  ResultSet res = dbmd.getImportedKeys(null, null, null);
  ResultSetMetaData md = res.getMetaData();
  // JUnit convention: expected value first (fixes swapped args).
  assertEquals(14, md.getColumnCount());
  assertFalse(res.next());
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/** With two columns aliased "a", name lookup must resolve to the first one. */
@Test public void testDuplicateColumnNameOrder() throws SQLException {
  Statement statement = con.createStatement();
  ResultSet rows = statement.executeQuery("SELECT 1 AS a, 2 AS a from " + tableName);
  assertTrue(rows.next());
  assertEquals(1, rows.getInt("a"));
  rows.close();
}
APIUtilityVerifier IterativeVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * validate schema generated by "set" command
 * @throws SQLException
 */
@Test public void testSetCommand() throws SQLException {
  Statement statement = con.createStatement();
  ResultSet res = statement.executeQuery("set -v");
  ResultSetMetaData md = res.getMetaData();
  // "set -v" yields a single column with the expected label.
  assertEquals(1, md.getColumnCount());
  assertEquals(SET_COLUMN_NAME, md.getColumnLabel(1));
  int lineCount = 0;
  while (res.next()) {
    lineCount++;
    String line = res.getString(1);
    // Hidden variables (e.g. the metastore password) must be masked, except
    // for the hidden-list config entry itself.
    assertFalse("set output must not contain hidden variables such as the metastore password:" + line,
        line.contains(HiveConf.ConfVars.METASTOREPWD.varname)
            && !(line.contains(HiveConf.ConfVars.HIVE_CONF_HIDDEN_LIST.varname)));
  }
  assertTrue("Nothing returned by set -v", lineCount > 0);
  res.close();
  statement.close();
}
APIUtilityVerifier EqualityVerifier NullVerifier HybridVerifier
/** setAutoCommit(false) is unsupported and must raise exactly one warning. */
@Test public void testAutoCommit() throws Exception {
  con.clearWarnings();
  // Enabling autoCommit (the supported mode) produces no warning.
  con.setAutoCommit(true);
  assertNull(con.getWarnings());
  // Disabling it is ignored with a single warning, and no chained warnings.
  con.setAutoCommit(false);
  SQLWarning warning = con.getWarnings();
  assertNotNull(warning);
  assertEquals("Hive does not support autoCommit=false", warning.getMessage());
  assertNull(warning.getNextWarning());
  con.clearWarnings();
}
UtilityVerifier EqualityVerifier HybridVerifier
/**
 * Negative Test for cursor repositioning to start of resultset
 * Verify unsupported JDBC resultset attributes
 * @throws Exception
 */
@Test public void testUnsupportedFetchTypes() throws Exception {
  // Scroll-sensitive cursors are not supported.
  try {
    con.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_READ_ONLY);
    fail("createStatement with TYPE_SCROLL_SENSITIVE should fail");
  } catch (SQLException e) {
    assertEquals("HYC00", e.getSQLState().trim());
  }
  // Updatable result sets are not supported either.
  try {
    con.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE);
    fail("createStatement with CONCUR_UPDATABLE should fail");
  } catch (SQLException e) {
    assertEquals("HYC00", e.getSQLState().trim());
  }
}
APIUtilityVerifier EqualityVerifier
/**
 * Validate the Metadata for the result set of a metadata getColumns call.
 */
@Test public void testMetaDataGetColumnsMetaData() throws SQLException {
  ResultSet columns = con.getMetaData().getColumns(null, null, "testhivejdbcdriver\\_table", null);
  ResultSetMetaData columnsMeta = columns.getMetaData();
  // Column 1: TABLE_CAT, a VARCHAR with unbounded display size.
  assertEquals("TABLE_CAT", columnsMeta.getColumnName(1));
  assertEquals(Types.VARCHAR, columnsMeta.getColumnType(1));
  assertEquals(Integer.MAX_VALUE, columnsMeta.getColumnDisplaySize(1));
  // Column 17: ORDINAL_POSITION, an INTEGER.
  assertEquals("ORDINAL_POSITION", columnsMeta.getColumnName(17));
  assertEquals(Types.INTEGER, columnsMeta.getColumnType(17));
  assertEquals(11, columnsMeta.getColumnDisplaySize(17));
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/** Creates role1, grants it to hive_test_user, and checks SHOW ROLE GRANT. */
@Test public void testShowRoleGrant() throws SQLException {
  Statement statement = con.createStatement();
  // Best-effort cleanup from earlier runs; the role may not exist yet.
  try {
    statement.execute("drop role role1");
  } catch (Exception ex) {
    LOG.warn("Ignoring error during drop role: " + ex);
  }
  statement.execute("create role role1");
  statement.execute("grant role role1 to user hive_test_user");
  statement.execute("show role grant user hive_test_user");
  ResultSet roleGrants = statement.getResultSet();
  // "public" is listed first, then the freshly granted "role1".
  assertTrue(roleGrants.next());
  assertEquals("public", roleGrants.getString(1));
  assertTrue(roleGrants.next());
  assertEquals("role1", roleGrants.getString(1));
  roleGrants.close();
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/** Binding a Timestamp parameter must match exactly one stored c17 row. */
@Test public void testPrepareSetTimestamp() throws SQLException, ParseException {
  String sql = String.format("SELECT * FROM %s WHERE c17 = ?", dataTypeTableName);
  try (PreparedStatement ps = con.prepareStatement(sql)) {
    ps.setTimestamp(1, Timestamp.valueOf("2012-04-22 09:00:00.123456789"));
    try (ResultSet resultSet = ps.executeQuery()) {
      // Exactly one matching row, whose first column is 1.
      assertTrue(resultSet.next());
      assertEquals(1, resultSet.getInt(1));
      assertFalse(resultSet.next());
    }
  }
}
APIUtilityVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * "show columns in" must list under_col then value, and nothing else.
 * @throws SQLException
 */
@Test public void testShowColumns() throws SQLException {
  Statement stmt = con.createStatement();
  assertNotNull("Statement is null", stmt);
  ResultSet res = stmt.executeQuery("show columns in " + tableName);
  // BUGFIX: next()'s return value was ignored; assert it so a missing row
  // fails here instead of as a confusing getString() error.
  assertTrue("Missing first column row", res.next());
  assertEquals("Column name 'under_col' not found", "under_col", res.getString(1));
  assertTrue("Missing second column row", res.next());
  assertEquals("Column name 'value' not found", "value", res.getString(1));
  assertFalse("More results found than expected", res.next());
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Duplicate result-column names must be disambiguated in the metadata:
 * c2_1, c2, c2_2 (as asserted below) for "select c1 as c2_1, c2, c1*2".
 */
@Test public void testResultSetMetaDataDuplicateColumnNames() throws SQLException {
  Statement statement = con.createStatement();
  ResultSet res = statement.executeQuery("select c1 as c2_1, c2, c1*2 from " + dataTypeTableName + " limit 1");
  ResultSetMetaData meta = res.getMetaData();
  ResultSet colRS = con.getMetaData().getColumns(null, null, dataTypeTableName.toLowerCase(), null);
  assertEquals(3, meta.getColumnCount());
  // colRS is advanced in lock-step to confirm the table has at least 3 columns.
  assertTrue(colRS.next());
  assertEquals("c2_1", meta.getColumnName(1));
  assertTrue(colRS.next());
  assertEquals("c2", meta.getColumnName(2));
  assertTrue(colRS.next());
  assertEquals("c2_2", meta.getColumnName(3));
}
Class: org.apache.hive.jdbc.TestJdbcWithLocalClusterSpark APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Creates a permanent UDF (example_add) backed by the hive-contrib JAR on
 * HDFS and verifies it is visible and callable both from the creating
 * connection and from a second connection.
 * @throws Exception
 */
@Test public void testPermFunc() throws Exception {
String udfClassName="org.apache.hadoop.hive.contrib.udf.example.UDFExampleAdd";
String mvnRepo=System.getProperty("maven.local.repository");
String hiveVersion=System.getProperty("hive.version");
String jarFileName="hive-contrib-" + hiveVersion + ".jar";
String[] pathParts={"org","apache","hive","hive-contrib",hiveVersion,jarFileName};
// Build the path to the contrib JAR inside the local maven repository.
Path contribJarPath=new Path(mvnRepo);
for ( String pathPart : pathParts) {
contribJarPath=new Path(contribJarPath,pathPart);
}
FileSystem localFs=FileSystem.getLocal(conf);
assertTrue("Hive contrib JAR exists at " + contribJarPath,localFs.exists(contribJarPath));
// Copy the JAR into HDFS so CREATE FUNCTION ... USING JAR can reference it.
String hdfsJarPathStr="hdfs:///" + jarFileName;
Path hdfsJarPath=new Path(hdfsJarPathStr);
FileSystem dfs=miniHS2.getDFS().getFileSystem();
dfs.copyFromLocalFile(contribJarPath,hdfsJarPath);
assertTrue("Verify contrib JAR copied to HDFS at " + hdfsJarPath,dfs.exists(hdfsJarPath));
String queryStr="CREATE FUNCTION example_add AS '" + udfClassName + "'"+ " USING JAR '"+ hdfsJarPathStr+ "'";
stmt.execute(queryStr);
ResultSet res;
// DESCRIBE FUNCTION on the new UDF; checkForNotExist presumably asserts the
// function is found (not reported as non-existent) — confirm helper semantics.
res=stmt.executeQuery("DESCRIBE FUNCTION " + dbName + ".example_add");
checkForNotExist(res);
String tableName="testTab3";
setupKv1Tabs(tableName);
// 1 + 2 via the new UDF from the creating connection.
res=stmt.executeQuery("SELECT EXAMPLE_ADD(1, 2) FROM " + tableName + " LIMIT 1");
assertTrue("query has results",res.next());
assertEquals(3,res.getInt(1));
assertFalse("no more results",res.next());
// The permanent function must also be usable from a brand-new connection.
Connection conn2=DriverManager.getConnection(miniHS2.getJdbcURL(dbName),System.getProperty("user.name"),"bar");
Statement stmt2=conn2.createStatement();
stmt2.execute("USE " + dbName);
res=stmt2.executeQuery("DESCRIBE FUNCTION " + dbName + ".example_add");
checkForNotExist(res);
res=stmt2.executeQuery("SELECT " + dbName + ".example_add(1, 1) FROM "+ tableName+ " LIMIT 1");
assertTrue("query has results",res.next());
assertEquals(2,res.getInt(1));
assertFalse("no more results",res.next());
stmt.execute("DROP TABLE " + tableName);
}
Class: org.apache.hive.jdbc.TestJdbcWithMiniHS2 BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/** getSchema()/setSchema() must track USE statements; catalogs stay empty. */
@Test public void testConnectionSchemaAPIs() throws Exception {
  String db1 = "DB1";
  HiveConnection hiveConn = (HiveConnection) hs2Conn;
  assertEquals("default", hiveConn.getSchema());
  Statement stmt = hs2Conn.createStatement();
  stmt.execute("DROP DATABASE IF EXISTS " + db1 + " CASCADE");
  stmt.execute("CREATE DATABASE " + db1);
  // Creating a database must not switch the current schema.
  assertEquals("default", hiveConn.getSchema());
  // USE switches the schema reported by the connection.
  stmt.execute("USE " + db1);
  assertEquals(db1, hiveConn.getSchema());
  stmt.execute("USE default");
  assertEquals("default", hiveConn.getSchema());
  // setSchema() behaves like USE.
  hiveConn.setSchema(db1);
  assertEquals(db1, hiveConn.getSchema());
  hiveConn.setSchema("default");
  assertEquals("default", hiveConn.getSchema());
  // getCatalog() stays empty and setCatalog() is a no-op.
  assertTrue(hiveConn.getCatalog().isEmpty());
  hiveConn.setCatalog("foo");
  assertTrue(hiveConn.getCatalog().isEmpty());
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/** End-to-end smoke test: create a table, load data, read a row back. */
@Test public void testConnection() throws Exception {
  String tableName = "testTab1";
  Statement statement = hs2Conn.createStatement();
  statement.execute("DROP TABLE IF EXISTS " + tableName);
  statement.execute("CREATE TABLE " + tableName + " (under_col INT COMMENT 'the under column', value STRING) COMMENT ' test table'");
  statement.execute("load data local inpath '" + kvDataFilePath.toString() + "' into table " + tableName);
  ResultSet res = statement.executeQuery("SELECT * FROM " + tableName);
  // First row's value column must match the loaded data.
  assertTrue(res.next());
  assertEquals("val_238", res.getString(2));
  res.close();
  statement.close();
}
Class: org.apache.hive.jdbc.TestJdbcWithMiniMr APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Creates a permanent UDF (example_add) backed by the hive-contrib JAR on
 * HDFS and verifies it is visible and callable both from the creating
 * connection and from a second connection.
 * @throws Exception
 */
@Test public void testPermFunc() throws Exception {
String udfClassName="org.apache.hadoop.hive.contrib.udf.example.UDFExampleAdd";
String mvnRepo=System.getProperty("maven.local.repository");
String hiveVersion=System.getProperty("hive.version");
String jarFileName="hive-contrib-" + hiveVersion + ".jar";
String[] pathParts={"org","apache","hive","hive-contrib",hiveVersion,jarFileName};
// Build the path to the contrib JAR inside the local maven repository.
Path contribJarPath=new Path(mvnRepo);
for ( String pathPart : pathParts) {
contribJarPath=new Path(contribJarPath,pathPart);
}
FileSystem localFs=FileSystem.getLocal(conf);
assertTrue("Hive contrib JAR exists at " + contribJarPath,localFs.exists(contribJarPath));
// Copy the JAR into HDFS so CREATE FUNCTION ... USING JAR can reference it.
String hdfsJarPathStr="hdfs:///" + jarFileName;
Path hdfsJarPath=new Path(hdfsJarPathStr);
FileSystem dfs=miniHS2.getDFS().getFileSystem();
dfs.copyFromLocalFile(contribJarPath,hdfsJarPath);
assertTrue("Verify contrib JAR copied to HDFS at " + hdfsJarPath,dfs.exists(hdfsJarPath));
String queryStr="CREATE FUNCTION example_add AS '" + udfClassName + "'"+ " USING JAR '"+ hdfsJarPathStr+ "'";
stmt.execute(queryStr);
ResultSet res;
// DESCRIBE FUNCTION on the new UDF; checkForNotExist presumably asserts the
// function is found (not reported as non-existent) — confirm helper semantics.
res=stmt.executeQuery("DESCRIBE FUNCTION " + dbName + ".example_add");
checkForNotExist(res);
String tableName="testTab3";
setupKv1Tabs(tableName);
// 1 + 2 via the new UDF from the creating connection.
res=stmt.executeQuery("SELECT EXAMPLE_ADD(1, 2) FROM " + tableName + " LIMIT 1");
assertTrue("query has results",res.next());
assertEquals(3,res.getInt(1));
assertFalse("no more results",res.next());
// The permanent function must also be usable from a brand-new connection.
Connection conn2=DriverManager.getConnection(miniHS2.getJdbcURL(dbName),System.getProperty("user.name"),"bar");
Statement stmt2=conn2.createStatement();
stmt2.execute("USE " + dbName);
res=stmt2.executeQuery("DESCRIBE FUNCTION " + dbName + ".example_add");
checkForNotExist(res);
res=stmt2.executeQuery("SELECT " + dbName + ".example_add(1, 1) FROM "+ tableName+ " LIMIT 1");
assertTrue("query has results",res.next());
assertEquals(2,res.getInt(1));
assertFalse("no more results",res.next());
stmt.execute("DROP TABLE " + tableName);
}
APIUtilityVerifier IterativeVerifier BranchVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Temporary tables must be visible through getTables()/getColumns() on the
 * creating connection, but invisible to a second connection.
 * @throws Exception
 */
@Test public void testTempTable() throws Exception {
  String tempTableName = "tmp1";
  stmt.execute("CREATE TEMPORARY TABLE " + tempTableName + " (key string, value string)");
  stmt.execute("load data local inpath '" + dataFilePath.toString() + "' into table " + tempTableName);
  String resultVal = "val_238";
  String queryStr = "SELECT * FROM " + tempTableName + " where value = '" + resultVal + "'";
  verifyResult(queryStr, resultVal, 2);
  DatabaseMetaData md = hs2Conn.getMetaData();
  assertTrue(md.getConnection() == hs2Conn);
  // The temp table must appear exactly once, both with and without a name filter
  // (duplicated scan loops factored into a helper).
  assertTrue("Found temp table", tableListedExactlyOnce(md.getTables(null, null, tempTableName, null), tempTableName));
  assertTrue("Found temp table", tableListedExactlyOnce(md.getTables(null, null, null, null), tempTableName));
  // Column metadata: (key string, value string), both reported as VARCHAR.
  ResultSet rs = md.getColumns(null, null, tempTableName, null);
  assertTrue("First row", rs.next());
  assertTrue(rs.getString(3).equalsIgnoreCase(tempTableName));
  assertTrue(rs.getString(4).equalsIgnoreCase("key"));
  assertEquals(Types.VARCHAR, rs.getInt(5));
  assertTrue("Second row", rs.next());
  assertTrue(rs.getString(3).equalsIgnoreCase(tempTableName));
  assertTrue(rs.getString(4).equalsIgnoreCase("value"));
  assertEquals(Types.VARCHAR, rs.getInt(5));
  // A different connection must not see the temp table.
  Connection conn2 = DriverManager.getConnection(miniHS2.getJdbcURL(dbName), System.getProperty("user.name"), "bar");
  Statement stmt2 = conn2.createStatement();
  stmt2.execute("USE " + dbName);
  boolean gotException = false;
  try {
    // Unused ResultSet local removed; only the failure matters here.
    stmt2.executeQuery(queryStr);
  } catch (SQLException err) {
    assertTrue("Expecting table not found error, instead got: " + err, err.getMessage().contains("Table not found"));
    gotException = true;
  }
  assertTrue("Exception while querying non-existing temp table", gotException);
}

/** Scans a getTables() result set; true iff tableName occurs exactly once. */
private boolean tableListedExactlyOnce(ResultSet rs, String expectedTableName) throws SQLException {
  boolean found = false;
  while (rs.next()) {
    String tableName = rs.getString(3);
    if (tableName.equalsIgnoreCase(expectedTableName)) {
      assertFalse("Table not found yet", found);
      found = true;
    }
  }
  return found;
}
Class: org.apache.hive.jdbc.TestSSL UtilityVerifier EqualityVerifier HybridVerifier
/**
 * Test non-SSL client with SSL server fails
 * (duplicated try/fail/catch blocks factored into a helper)
 * @throws Exception
 */
@Test public void testConnectionMismatch() throws Exception {
  setSslConfOverlay(confOverlay);
  // Binary transport first.
  setBinaryConfOverlay(confOverlay);
  miniHS2.start(confOverlay);
  assertNonSslConnectFails(miniHS2.getJdbcURL());
  assertNonSslConnectFails(miniHS2.getJdbcURL() + ";ssl=false");
  miniHS2.stop();
  // Same expectation over HTTP transport.
  setHttpConfOverlay(confOverlay);
  miniHS2.start(confOverlay);
  assertNonSslConnectFails(miniHS2.getJdbcURL("default", ";ssl=false"));
}

/** Attempts a plain (non-SSL) connection and expects SQLSTATE 08S01. */
private void assertNonSslConnectFails(String jdbcUrl) {
  try {
    hs2Conn = DriverManager.getConnection(jdbcUrl, System.getProperty("user.name"), "bar");
    fail("NON SSL connection should fail with SSL server");
  } catch (SQLException e) {
    assertEquals("08S01", e.getSQLState().trim());
  }
}
UtilityVerifier EqualityVerifier HybridVerifier
/**
 * Test SSL client with non-SSL server fails
 * (duplicated try/fail/catch blocks factored into a helper)
 * @throws Exception
 */
@Test public void testInvalidConfig() throws Exception {
  clearSslConfOverlay(confOverlay);
  // Binary transport, SSL params on the URL.
  setBinaryConfOverlay(confOverlay);
  miniHS2.start(confOverlay);
  DriverManager.setLoginTimeout(4);
  assertSslConnectFails(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS));
  // Same expectation with the trust store supplied via system properties.
  System.setProperty(JAVA_TRUST_STORE_PROP, dataFileDir + File.separator + TRUST_STORE_NAME);
  System.setProperty(JAVA_TRUST_STORE_PASS_PROP, KEY_STORE_PASSWORD);
  assertSslConnectFails(miniHS2.getJdbcURL() + ";ssl=true");
  miniHS2.stop();
  System.clearProperty(JAVA_TRUST_STORE_PROP);
  System.clearProperty(JAVA_TRUST_STORE_PASS_PROP);
  // Same expectation over HTTP transport.
  setHttpConfOverlay(confOverlay);
  miniHS2.start(confOverlay);
  assertSslConnectFails(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS));
}

/** Attempts an SSL connection to the non-SSL server and expects SQLSTATE 08S01. */
private void assertSslConnectFails(String jdbcUrl) {
  try {
    hs2Conn = DriverManager.getConnection(jdbcUrl, System.getProperty("user.name"), "bar");
    fail("SSL connection should fail with NON-SSL server");
  } catch (SQLException e) {
    assertEquals("08S01", e.getSQLState().trim());
  }
}
APIUtilityVerifier UtilityVerifier BooleanVerifier InternalCallVerifier AssumptionSetter EqualityVerifier NullVerifier HybridVerifier
/**
 * Tests to ensure SSLv2 and SSLv3 are disabled
 */
@Test public void testSSLVersion() throws Exception {
// Requires the openssl CLI on a Linux host; skip the test elsewhere.
Assume.assumeTrue(execCommand("which openssl") == 0);
Assume.assumeTrue(System.getProperty("os.name").toLowerCase().contains("linux"));
setSslConfOverlay(confOverlay);
// Binary mode: confirm a TLS connection succeeds at all first.
setBinaryConfOverlay(confOverlay);
miniHS2.start(confOverlay);
hs2Conn=DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=true;sslTrustStore=" + dataFileDir+ File.separator+ TRUST_STORE_NAME+ ";trustStorePassword="+ KEY_STORE_PASSWORD,System.getProperty("user.name"),"bar");
hs2Conn.close();
// openssl is expected to exit with code 1 when the server refuses the
// SSLv2/SSLv3 handshake.
Assert.assertEquals("Expected exit code of 1",1,execCommand("openssl s_client -connect " + miniHS2.getHost() + ":"+ miniHS2.getBinaryPort()+ " -ssl2 < /dev/null"));
Assert.assertEquals("Expected exit code of 1",1,execCommand("openssl s_client -connect " + miniHS2.getHost() + ":"+ miniHS2.getBinaryPort()+ " -ssl3 < /dev/null"));
miniHS2.stop();
// HTTP mode: the same connect string is expected to fail outright here;
// verify the SQLSTATE and the root cause of the failure.
setHttpConfOverlay(confOverlay);
miniHS2.start(confOverlay);
try {
hs2Conn=DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=true;sslTrustStore=" + dataFileDir+ File.separator+ TRUST_STORE_NAME+ ";trustStorePassword="+ KEY_STORE_PASSWORD,System.getProperty("user.name"),"bar");
Assert.fail("Expected SQLException during connect");
}
catch ( SQLException e) {
LOG.info("Expected exception: " + e,e);
Assert.assertEquals("08S01",e.getSQLState().trim());
// Walk to the root cause of the exception chain.
Throwable cause=e.getCause();
Assert.assertNotNull(cause);
while (cause.getCause() != null) {
cause=cause.getCause();
}
Assert.assertEquals("org.apache.http.NoHttpResponseException",cause.getClass().getName());
Assert.assertTrue(cause.getMessage().contains("failed to respond"));
}
miniHS2.stop();
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Start HS2 in SSL mode, open a SSL connection and fetch data
 * @throws Exception
 */
@Test public void testSSLFetch() throws Exception {
  setSslConfOverlay(confOverlay);
  setBinaryConfOverlay(confOverlay);
  miniHS2.start(confOverlay);
  String tableName = "sslTab";
  Path dataFilePath = new Path(dataFileDir, "kv1.txt");
  hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
      System.getProperty("user.name"), "bar");
  setupTestTableWithData(tableName, dataFilePath, hs2Conn);
  Statement statement = hs2Conn.createStatement();
  ResultSet res = statement.executeQuery("SELECT * FROM " + tableName);
  // Expect 500 rows of (i, "val_" + i).
  int rowCount = 0;
  while (res.next()) {
    rowCount++;
    assertEquals("val_" + res.getInt(1), res.getString(2));
  }
  assertEquals(500, rowCount);
  hs2Conn.close();
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Start HS2 in Http mode with SSL enabled, open a SSL connection and fetch data
 * @throws Exception
 */
@Test public void testSSLFetchHttp() throws Exception {
  setSslConfOverlay(confOverlay);
  setHttpConfOverlay(confOverlay);
  miniHS2.start(confOverlay);
  String tableName = "sslTab";
  Path dataFilePath = new Path(dataFileDir, "kv1.txt");
  hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
      System.getProperty("user.name"), "bar");
  setupTestTableWithData(tableName, dataFilePath, hs2Conn);
  Statement statement = hs2Conn.createStatement();
  ResultSet res = statement.executeQuery("SELECT * FROM " + tableName);
  // Expect 500 rows of (i, "val_" + i).
  int rowCount = 0;
  while (res.next()) {
    rowCount++;
    assertEquals("val_" + res.getInt(1), res.getString(2));
  }
  assertEquals(500, rowCount);
  hs2Conn.close();
}
Class: org.apache.hive.jdbc.TestServiceDiscovery APIUtilityVerifier EqualityVerifier
/**
 * Publishes three HS2 instances to ZooKeeper, drives the ZK-based
 * connection-param resolution until every instance has been rejected, and
 * verifies each host/port/znode combination was offered exactly once.
 * (Raw Map/List/Collection types replaced with generics; the triplicated
 * conf-publishing block replaced by a loop.)
 * @throws Exception
 */
@Test public void testConnect() throws Exception {
  // Register three server instances (host, port, znode uri) in ZooKeeper.
  String[][] instances = {{"host-1", "8000", "uri1"}, {"host-2", "9000", "uri2"}, {"host-3", "10000", "uri3"}};
  Map<String, String> confs = new HashMap<>();
  for (String[] instance : instances) {
    confs.clear();
    confs.put("hive.server2.thrift.bind.host", instance[0]);
    confs.put("hive.server2.transport.mode", "binary");
    confs.put("hive.server2.thrift.port", instance[1]);
    confs.put("hive.server2.authentication", "PLAIN");
    publishConfsToZk(confs, instance[2]);
  }
  Utils.JdbcConnectionParams connParams = new Utils.JdbcConnectionParams();
  connParams.setZooKeeperEnsemble(server.getConnectString());
  connParams.getSessionVars().put(Utils.JdbcConnectionParams.ZOOKEEPER_NAMESPACE, "hiveserver2");
  // Resolve repeatedly, rejecting each returned znode, until none remain.
  List<ConnParamInfo> allConnectParams = new ArrayList<>();
  while (true) {
    try {
      ZooKeeperHiveClientHelper.configureConnParams(connParams);
    } catch (ZooKeeperHiveClientException e) {
      break; // all registered instances have been rejected
    }
    connParams.getRejectedHostZnodePaths().add(connParams.getCurrentHostZnodePath());
    allConnectParams.add(new ConnParamInfo(connParams.getHost(), connParams.getPort(), connParams.getCurrentHostZnodePath()));
  }
  // Each instance must have been offered exactly once.
  Collection<ConnParamInfo> cp1 = Collections2.filter(allConnectParams, new ConnParamInfoPred("host-1", 8000, "serverUri=uri1"));
  Collection<ConnParamInfo> cp2 = Collections2.filter(allConnectParams, new ConnParamInfoPred("host-2", 9000, "serverUri=uri2"));
  Collection<ConnParamInfo> cp3 = Collections2.filter(allConnectParams, new ConnParamInfoPred("host-3", 10000, "serverUri=uri3"));
  // JUnit convention: expected value first (fixes swapped args).
  Assert.assertEquals(1, cp1.size());
  Assert.assertEquals(1, cp2.size());
  Assert.assertEquals(1, cp3.size());
}
Class: org.apache.hive.jdbc.authorization.TestCLIAuthzSessionContext BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/** CLI-driven session context must carry a session id and HIVECLI client type. */
@Test public void testAuthzSessionContextContents() throws Exception {
  driver.processCmd("show tables");
  String sessionStr = sessionCtx.getSessionString();
  assertTrue("session string size check", sessionStr.length() > 10);
  assertEquals("Client type ", HiveAuthzSessionContext.CLIENT_TYPE.HIVECLI, sessionCtx.getClientType());
}
Class: org.apache.hive.jdbc.authorization.TestHS2AuthzSessionContext BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/** HS2 session context must carry a session id and HIVESERVER2 client type. */
@Test public void testAuthzSessionContextContents() throws Exception {
  String sessionStr = sessionCtx.getSessionString();
  assertTrue("session string size check", sessionStr.length() > 10);
  assertEquals("Client type ", HiveAuthzSessionContext.CLIENT_TYPE.HIVESERVER2, sessionCtx.getClientType());
}
Class: org.apache.hive.jdbc.cbo_rp_TestJdbcDriver2 APIUtilityVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * "show columns in" must list under_col then value, and nothing else.
 * @throws SQLException
 */
@Test public void testShowColumns() throws SQLException {
  Statement stmt = con.createStatement();
  assertNotNull("Statement is null", stmt);
  ResultSet res = stmt.executeQuery("show columns in " + tableName);
  // BUGFIX: next()'s return value was ignored; assert it so a missing row
  // fails here instead of as a confusing getString() error.
  assertTrue("Missing first column row", res.next());
  assertEquals("Column name 'under_col' not found", "under_col", res.getString(1));
  assertTrue("Missing second column row", res.next());
  assertEquals("Column name 'value' not found", "value", res.getString(1));
  assertFalse("More results found than expected", res.next());
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Verify selecting using builtin UDFs
 * @throws SQLException
 */
@Test public void testBuiltInUDFCol() throws SQLException {
  Statement stmt = con.createStatement();
  ResultSet res = stmt.executeQuery("select c12, bin(c12) from " + dataTypeTableName + " where c1=1");
  ResultSetMetaData md = res.getMetaData();
  // JUnit convention: expected value first, actual second (fixes swapped args).
  assertEquals(2, md.getColumnCount());
  // The unnamed bin(c12) expression is auto-labeled "_c1" here (as asserted).
  assertEquals("_c1", md.getColumnLabel(2));
  assertTrue(res.next());
  assertEquals(1, res.getLong(1));
  assertEquals("1", res.getString(2));
  res.close();
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * SHOW ROLE GRANT for a user must list both the implicit "public" role
 * and an explicitly granted role, in that order.
 * @throws SQLException
 */
@Test public void testShowRoleGrant() throws SQLException {
Statement stmt=con.createStatement();
// Best-effort cleanup: the role may not exist on a fresh metastore.
try {
stmt.execute("drop role role1");
}
catch ( Exception ex) {
LOG.warn("Ignoring error during drop role: " + ex);
}
stmt.execute("create role role1");
stmt.execute("grant role role1 to user hive_test_user");
stmt.execute("show role grant user hive_test_user");
ResultSet res=stmt.getResultSet();
// Row 1: every user implicitly belongs to "public".
assertTrue(res.next());
assertEquals("public",res.getString(1));
// Row 2: the role granted above.
assertTrue(res.next());
assertEquals("role1",res.getString(1));
res.close();
}
APIUtilityVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * test getProcedureColumns(): Hive has no stored procedures, so the result
 * set must expose the JDBC-defined 20-column layout and contain no rows.
 * @throws SQLException
 */
@Test public void testProcCols() throws SQLException {
  DatabaseMetaData dbmd = con.getMetaData();
  assertNotNull(dbmd);
  ResultSet res = dbmd.getProcedureColumns(null, null, null, null);
  ResultSetMetaData md = res.getMetaData();
  // Expected value first (JUnit convention) so failures read correctly.
  assertEquals(20, md.getColumnCount());
  assertFalse(res.next());
  res.close();
}
InternalCallVerifier EqualityVerifier
/**
 * Parses each HTTP-mode JDBC URL fixture and verifies host, port, database
 * name, and the transportMode/httpPath session variables.
 */
@Test public void testParseUrlHttpMode() throws SQLException, JdbcUriParseException, ZooKeeperHiveClientException {
  new HiveDriver();
  for (String[] urlCase : HTTP_URL_PROPERTIES) {
    JdbcConnectionParams parsed = Utils.parseURL(urlCase[0], new Properties());
    assertEquals(parsed.getHost(), urlCase[1]);
    assertEquals(parsed.getPort(), Integer.parseInt(urlCase[2]));
    assertEquals(parsed.getDbName(), urlCase[3]);
    assertEquals(parsed.getSessionVars().get("transportMode"), urlCase[4]);
    assertEquals(parsed.getSessionVars().get("httpPath"), urlCase[5]);
  }
}
InternalCallVerifier EqualityVerifier
/**
 * For every URL fixture the driver must report exactly three connection
 * properties — HOST, PORT, DBNAME — with the expected values.
 */
@Test public void testDriverProperties() throws SQLException {
  HiveDriver hiveDriver = new HiveDriver();
  for (String[] urlCase : URL_PROPERTIES) {
    DriverPropertyInfo[] propInfo = hiveDriver.getPropertyInfo(urlCase[0], null);
    assertEquals("unexpected DriverPropertyInfo array size", 3, propInfo.length);
    assertDpi(propInfo[0], "HOST", urlCase[1]);
    assertDpi(propInfo[1], "PORT", urlCase[2]);
    assertDpi(propInfo[2], "DBNAME", urlCase[3]);
  }
}
APIUtilityVerifier IterativeVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Checks JDBC metadata and value retrieval for Hive interval types:
 * col1 is interval_year_month, col2 (c17 - c17) is interval_day_time.
 * Rows are ordered by col1, so the rows with NULL c17 come first.
 * @throws Exception
 */
@Test public void testIntervalTypes() throws Exception {
Statement stmt=con.createStatement();
ResultSet res=stmt.executeQuery("select case when c17 is null then null else interval '1' year end as col1," + " c17 - c17 as col2 from " + dataTypeTableName + " order by col1");
ResultSetMetaData meta=res.getMetaData();
// Interval types surface as java.sql.Types.OTHER with Hive-specific
// type names and value classes.
assertEquals("col1",meta.getColumnLabel(1));
assertEquals(java.sql.Types.OTHER,meta.getColumnType(1));
assertEquals("interval_year_month",meta.getColumnTypeName(1));
assertEquals(11,meta.getColumnDisplaySize(1));
assertEquals(11,meta.getPrecision(1));
assertEquals(0,meta.getScale(1));
assertEquals(HiveIntervalYearMonth.class.getName(),meta.getColumnClassName(1));
assertEquals("col2",meta.getColumnLabel(2));
assertEquals(java.sql.Types.OTHER,meta.getColumnType(2));
assertEquals("interval_day_time",meta.getColumnTypeName(2));
assertEquals(29,meta.getColumnDisplaySize(2));
assertEquals(29,meta.getPrecision(2));
assertEquals(0,meta.getScale(2));
assertEquals(HiveIntervalDayTime.class.getName(),meta.getColumnClassName(2));
// First two rows have NULL c17, so the interval columns are NULL.
// NOTE(review): the loop bound uses '<', so with a 2-column result only
// column 1 is actually checked — confirm whether column 2 should be too.
assertTrue(res.next());
for (int i=1; i < meta.getColumnCount(); i++) {
assertNull("Column " + i + " should be null",res.getObject(i));
}
assertTrue(res.next());
for (int i=1; i < meta.getColumnCount(); i++) {
assertNull("Column " + i + " should be null",res.getObject(i));
}
// Third row: non-null c17 yields interval '1' year and a zero-length
// day-time interval.
assertTrue(res.next());
assertEquals("1-0",res.getString(1));
assertEquals(1,((HiveIntervalYearMonth)res.getObject(1)).getYears());
assertEquals("0 00:00:00.000000000",res.getString(2));
assertEquals(0,((HiveIntervalDayTime)res.getObject(2)).getDays());
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Sanity checks on connection-level DatabaseMetaData: product identity,
 * version format, SQL state dialect, and a handful of capability flags.
 */
@Test public void testDatabaseMetaData() throws SQLException {
  DatabaseMetaData meta = con.getMetaData();
  String productVersion = meta.getDatabaseProductVersion();
  assertEquals("Apache Hive", meta.getDatabaseProductName());
  assertEquals(HiveVersionInfo.getVersion(), productVersion);
  assertEquals(System.getProperty("hive.version"), productVersion);
  assertTrue("verifying hive version pattern. got " + productVersion, Pattern.matches("\\d+\\.\\d+\\.\\d+.*", productVersion));
  assertEquals(DatabaseMetaData.sqlStateSQL99, meta.getSQLStateType());
  assertFalse(meta.supportsCatalogsInTableDefinitions());
  assertTrue(meta.supportsSchemasInTableDefinitions());
  assertTrue(meta.supportsSchemasInDataManipulation());
  assertFalse(meta.supportsMultipleResultSets());
  assertFalse(meta.supportsStoredProcedures());
  assertTrue(meta.supportsAlterTableWithAddColumn());
  assertTrue(meta.getDatabaseMajorVersion() > -1);
  assertTrue(meta.getDatabaseMinorVersion() > -1);
}
UtilityVerifier EqualityVerifier HybridVerifier
/**
 * Column lookup via ResultSet.findColumn must be case-insensitive, both
 * for a plain column reference and for an upper-cased alias.
 * @throws SQLException
 */
@Test public void testResultSetColumnNameCaseInsensitive() throws SQLException {
  Statement stmt = con.createStatement();
  // Plain column reference.
  verifyCaseInsensitiveLookup(stmt.executeQuery("select c1 from " + dataTypeTableName + " limit 1"));
  // Upper-case alias for the same column.
  verifyCaseInsensitiveLookup(stmt.executeQuery("select c1 C1 from " + dataTypeTableName + " limit 1"));
  stmt.close();
}

/** Asserts the one-row result set resolves both "c1" and "C1" to a column. */
private void verifyCaseInsensitiveLookup(ResultSet res) throws SQLException {
  try {
    int count = 0;
    while (res.next()) {
      res.findColumn("c1");
      res.findColumn("C1");
      count++;
    }
    assertEquals(1, count);
  }
  catch (Exception e) {
    String msg = "Unexpected exception: " + e;
    LOG.info(msg, e);
    fail(msg);
  }
  finally {
    // Close the cursor even when an assertion or lookup fails.
    res.close();
  }
}
UtilityVerifier EqualityVerifier HybridVerifier
/**
 * Negative test for cursor repositioning: beforeFirst() must be rejected
 * on a forward-only result set with a "Method not supported" SQLException.
 * @throws Exception
 */
@Test public void testFetchFirstError() throws Exception {
  Statement stmt = con.createStatement();
  ResultSet rs = stmt.executeQuery("select * from " + tableName);
  try {
    rs.beforeFirst();
    fail("beforeFirst() should fail for normal resultset");
  }
  catch (SQLException e) {
    assertEquals("Method not supported for TYPE_FORWARD_ONLY resultset", e.getMessage());
  }
}
APIUtilityVerifier IterativeVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * End-to-end check of ResultSetMetaData for a projection covering every
 * Hive column type, cross-checked against DatabaseMetaData.getColumns for
 * the same table. The colRS cursor is advanced in lockstep with the
 * projected columns; computed columns (c1*2 and sentences(...)) have no
 * corresponding getColumns row.
 * @throws SQLException
 */
@Test public void testResultSetMetaData() throws SQLException {
Statement stmt=con.createStatement();
ResultSet res=stmt.executeQuery("select c1, c2, c3, c4, c5 as a, c6, c7, c8, c9, c10, c11, c12, " + "c1*2, sentences(null, null, null) as b, c17, c18, c20, c21, c22, c23 from " + dataTypeTableName + " limit 1");
ResultSetMetaData meta=res.getMetaData();
// Table-column metadata, iterated alongside 'meta'.
ResultSet colRS=con.getMetaData().getColumns(null,null,dataTypeTableName.toLowerCase(),null);
assertEquals(20,meta.getColumnCount());
// Column 1: c1 (int).
assertTrue(colRS.next());
assertEquals("c1",meta.getColumnName(1));
assertEquals(Types.INTEGER,meta.getColumnType(1));
assertEquals("int",meta.getColumnTypeName(1));
assertEquals(11,meta.getColumnDisplaySize(1));
assertEquals(10,meta.getPrecision(1));
assertEquals(0,meta.getScale(1));
assertEquals("c1",colRS.getString("COLUMN_NAME"));
assertEquals(Types.INTEGER,colRS.getInt("DATA_TYPE"));
assertEquals("int",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(1),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(1),colRS.getInt("DECIMAL_DIGITS"));
// Column 2: c2 (boolean).
assertTrue(colRS.next());
assertEquals("c2",meta.getColumnName(2));
assertEquals("boolean",meta.getColumnTypeName(2));
assertEquals(Types.BOOLEAN,meta.getColumnType(2));
assertEquals(1,meta.getColumnDisplaySize(2));
assertEquals(1,meta.getPrecision(2));
assertEquals(0,meta.getScale(2));
assertEquals("c2",colRS.getString("COLUMN_NAME"));
assertEquals(Types.BOOLEAN,colRS.getInt("DATA_TYPE"));
assertEquals("boolean",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getScale(2),colRS.getInt("DECIMAL_DIGITS"));
// Column 3: c3 (double).
assertTrue(colRS.next());
assertEquals("c3",meta.getColumnName(3));
assertEquals(Types.DOUBLE,meta.getColumnType(3));
assertEquals("double",meta.getColumnTypeName(3));
assertEquals(25,meta.getColumnDisplaySize(3));
assertEquals(15,meta.getPrecision(3));
assertEquals(15,meta.getScale(3));
assertEquals("c3",colRS.getString("COLUMN_NAME"));
assertEquals(Types.DOUBLE,colRS.getInt("DATA_TYPE"));
assertEquals("double",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(3),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(3),colRS.getInt("DECIMAL_DIGITS"));
// Column 4: c4 (string) — unbounded size.
assertTrue(colRS.next());
assertEquals("c4",meta.getColumnName(4));
assertEquals(Types.VARCHAR,meta.getColumnType(4));
assertEquals("string",meta.getColumnTypeName(4));
assertEquals(Integer.MAX_VALUE,meta.getColumnDisplaySize(4));
assertEquals(Integer.MAX_VALUE,meta.getPrecision(4));
assertEquals(0,meta.getScale(4));
assertEquals("c4",colRS.getString("COLUMN_NAME"));
assertEquals(Types.VARCHAR,colRS.getInt("DATA_TYPE"));
assertEquals("string",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(4),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(4),colRS.getInt("DECIMAL_DIGITS"));
// Column 5: c5 aliased as "a" (array).
assertTrue(colRS.next());
assertEquals("a",meta.getColumnName(5));
assertEquals(Types.ARRAY,meta.getColumnType(5));
assertEquals("array",meta.getColumnTypeName(5));
assertEquals(Integer.MAX_VALUE,meta.getColumnDisplaySize(5));
assertEquals(Integer.MAX_VALUE,meta.getPrecision(5));
assertEquals(0,meta.getScale(5));
assertEquals("c5",colRS.getString("COLUMN_NAME"));
assertEquals(Types.ARRAY,colRS.getInt("DATA_TYPE"));
assertEquals("array",colRS.getString("TYPE_NAME").toLowerCase());
// Column 6: c6 (map).
assertTrue(colRS.next());
assertEquals("c6",meta.getColumnName(6));
assertEquals(Types.JAVA_OBJECT,meta.getColumnType(6));
assertEquals("map",meta.getColumnTypeName(6));
assertEquals(Integer.MAX_VALUE,meta.getColumnDisplaySize(6));
assertEquals(Integer.MAX_VALUE,meta.getPrecision(6));
assertEquals(0,meta.getScale(6));
assertEquals("c6",colRS.getString("COLUMN_NAME"));
assertEquals(Types.JAVA_OBJECT,colRS.getInt("DATA_TYPE"));
assertEquals("map",colRS.getString("TYPE_NAME").toLowerCase());
// Column 7: c7 (map).
assertTrue(colRS.next());
assertEquals("c7",meta.getColumnName(7));
assertEquals(Types.JAVA_OBJECT,meta.getColumnType(7));
assertEquals("map",meta.getColumnTypeName(7));
assertEquals(Integer.MAX_VALUE,meta.getColumnDisplaySize(7));
assertEquals(Integer.MAX_VALUE,meta.getPrecision(7));
assertEquals(0,meta.getScale(7));
assertEquals("c7",colRS.getString("COLUMN_NAME"));
assertEquals(Types.JAVA_OBJECT,colRS.getInt("DATA_TYPE"));
assertEquals("map",colRS.getString("TYPE_NAME").toLowerCase());
// Column 8: c8 (struct).
assertTrue(colRS.next());
assertEquals("c8",meta.getColumnName(8));
assertEquals(Types.STRUCT,meta.getColumnType(8));
assertEquals("struct",meta.getColumnTypeName(8));
assertEquals(Integer.MAX_VALUE,meta.getColumnDisplaySize(8));
assertEquals(Integer.MAX_VALUE,meta.getPrecision(8));
assertEquals(0,meta.getScale(8));
assertEquals("c8",colRS.getString("COLUMN_NAME"));
assertEquals(Types.STRUCT,colRS.getInt("DATA_TYPE"));
assertEquals("struct",colRS.getString("TYPE_NAME").toLowerCase());
// Column 9: c9 (tinyint).
assertTrue(colRS.next());
assertEquals("c9",meta.getColumnName(9));
assertEquals(Types.TINYINT,meta.getColumnType(9));
assertEquals("tinyint",meta.getColumnTypeName(9));
assertEquals(4,meta.getColumnDisplaySize(9));
assertEquals(3,meta.getPrecision(9));
assertEquals(0,meta.getScale(9));
assertEquals("c9",colRS.getString("COLUMN_NAME"));
assertEquals(Types.TINYINT,colRS.getInt("DATA_TYPE"));
assertEquals("tinyint",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(9),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(9),colRS.getInt("DECIMAL_DIGITS"));
// Column 10: c10 (smallint).
assertTrue(colRS.next());
assertEquals("c10",meta.getColumnName(10));
assertEquals(Types.SMALLINT,meta.getColumnType(10));
assertEquals("smallint",meta.getColumnTypeName(10));
assertEquals(6,meta.getColumnDisplaySize(10));
assertEquals(5,meta.getPrecision(10));
assertEquals(0,meta.getScale(10));
assertEquals("c10",colRS.getString("COLUMN_NAME"));
assertEquals(Types.SMALLINT,colRS.getInt("DATA_TYPE"));
assertEquals("smallint",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(10),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(10),colRS.getInt("DECIMAL_DIGITS"));
// Column 11: c11 (float).
assertTrue(colRS.next());
assertEquals("c11",meta.getColumnName(11));
assertEquals(Types.FLOAT,meta.getColumnType(11));
assertEquals("float",meta.getColumnTypeName(11));
assertEquals(24,meta.getColumnDisplaySize(11));
assertEquals(7,meta.getPrecision(11));
assertEquals(7,meta.getScale(11));
assertEquals("c11",colRS.getString("COLUMN_NAME"));
assertEquals(Types.FLOAT,colRS.getInt("DATA_TYPE"));
assertEquals("float",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(11),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(11),colRS.getInt("DECIMAL_DIGITS"));
// Column 12: c12 (bigint).
assertTrue(colRS.next());
assertEquals("c12",meta.getColumnName(12));
assertEquals(Types.BIGINT,meta.getColumnType(12));
assertEquals("bigint",meta.getColumnTypeName(12));
assertEquals(20,meta.getColumnDisplaySize(12));
assertEquals(19,meta.getPrecision(12));
assertEquals(0,meta.getScale(12));
assertEquals("c12",colRS.getString("COLUMN_NAME"));
assertEquals(Types.BIGINT,colRS.getInt("DATA_TYPE"));
assertEquals("bigint",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(12),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(12),colRS.getInt("DECIMAL_DIGITS"));
// Columns 13 and 14 are computed (c1*2 and sentences(...)) with
// generated/aliased labels; colRS is not advanced for them.
assertEquals("_c12",meta.getColumnName(13));
assertEquals(Types.INTEGER,meta.getColumnType(13));
assertEquals("int",meta.getColumnTypeName(13));
assertEquals(11,meta.getColumnDisplaySize(13));
assertEquals(10,meta.getPrecision(13));
assertEquals(0,meta.getScale(13));
assertEquals("b",meta.getColumnName(14));
assertEquals(Types.ARRAY,meta.getColumnType(14));
assertEquals("array",meta.getColumnTypeName(14));
assertEquals(Integer.MAX_VALUE,meta.getColumnDisplaySize(14));
assertEquals(Integer.MAX_VALUE,meta.getPrecision(14));
assertEquals(0,meta.getScale(14));
// Skip the getColumns rows for table columns not in the projection
// (presumably c13-c16) and land on c17 — verify against the schema.
assertTrue(colRS.next());
assertTrue(colRS.next());
assertTrue(colRS.next());
assertTrue(colRS.next());
assertTrue(colRS.next());
// Column 15: c17 (timestamp).
assertEquals("c17",meta.getColumnName(15));
assertEquals(Types.TIMESTAMP,meta.getColumnType(15));
assertEquals("timestamp",meta.getColumnTypeName(15));
assertEquals(29,meta.getColumnDisplaySize(15));
assertEquals(29,meta.getPrecision(15));
assertEquals(9,meta.getScale(15));
assertEquals("c17",colRS.getString("COLUMN_NAME"));
assertEquals(Types.TIMESTAMP,colRS.getInt("DATA_TYPE"));
assertEquals("timestamp",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(15),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(15),colRS.getInt("DECIMAL_DIGITS"));
// Column 16: c18 (decimal(16,7)).
assertTrue(colRS.next());
assertEquals("c18",meta.getColumnName(16));
assertEquals(Types.DECIMAL,meta.getColumnType(16));
assertEquals("decimal",meta.getColumnTypeName(16));
assertEquals(18,meta.getColumnDisplaySize(16));
assertEquals(16,meta.getPrecision(16));
assertEquals(7,meta.getScale(16));
assertEquals("c18",colRS.getString("COLUMN_NAME"));
assertEquals(Types.DECIMAL,colRS.getInt("DATA_TYPE"));
assertEquals("decimal",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(16),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(16),colRS.getInt("DECIMAL_DIGITS"));
// Skip the unprojected c19 row and land on c20.
assertTrue(colRS.next());
assertTrue(colRS.next());
// Column 17: c20 (date).
assertEquals("c20",meta.getColumnName(17));
assertEquals(Types.DATE,meta.getColumnType(17));
assertEquals("date",meta.getColumnTypeName(17));
assertEquals(10,meta.getColumnDisplaySize(17));
assertEquals(10,meta.getPrecision(17));
assertEquals(0,meta.getScale(17));
assertEquals("c20",colRS.getString("COLUMN_NAME"));
assertEquals(Types.DATE,colRS.getInt("DATA_TYPE"));
assertEquals("date",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(17),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(17),colRS.getInt("DECIMAL_DIGITS"));
// Column 18: c21 (varchar(20)).
assertTrue(colRS.next());
assertEquals("c21",meta.getColumnName(18));
assertEquals(Types.VARCHAR,meta.getColumnType(18));
assertEquals("varchar",meta.getColumnTypeName(18));
assertEquals(20,meta.getColumnDisplaySize(18));
assertEquals(20,meta.getPrecision(18));
assertEquals(0,meta.getScale(18));
assertEquals("c21",colRS.getString("COLUMN_NAME"));
assertEquals(Types.VARCHAR,colRS.getInt("DATA_TYPE"));
assertEquals("varchar",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(18),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(18),colRS.getInt("DECIMAL_DIGITS"));
// Columns 19 and 20: c22 (char(15)) and c23 (binary); the colRS checks
// for c22 follow the meta checks for both columns.
assertTrue(colRS.next());
assertEquals("c22",meta.getColumnName(19));
assertEquals(Types.CHAR,meta.getColumnType(19));
assertEquals("char",meta.getColumnTypeName(19));
assertEquals(15,meta.getColumnDisplaySize(19));
assertEquals(15,meta.getPrecision(19));
assertEquals(0,meta.getScale(19));
assertEquals("c23",meta.getColumnName(20));
assertEquals(Types.BINARY,meta.getColumnType(20));
assertEquals("binary",meta.getColumnTypeName(20));
assertEquals(Integer.MAX_VALUE,meta.getColumnDisplaySize(20));
assertEquals(Integer.MAX_VALUE,meta.getPrecision(20));
assertEquals(0,meta.getScale(20));
assertEquals("c22",colRS.getString("COLUMN_NAME"));
assertEquals(Types.CHAR,colRS.getInt("DATA_TYPE"));
assertEquals("char",colRS.getString("TYPE_NAME").toLowerCase());
assertEquals(meta.getPrecision(19),colRS.getInt("COLUMN_SIZE"));
assertEquals(meta.getScale(19),colRS.getInt("DECIMAL_DIGITS"));
// Uniform flags across all projected columns.
for (int i=1; i <= meta.getColumnCount(); i++) {
assertFalse(meta.isAutoIncrement(i));
assertFalse(meta.isCurrency(i));
assertEquals(ResultSetMetaData.columnNullable,meta.isNullable(i));
}
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * verify 'explain ...' resultset: a single EXPLAIN column containing at
 * least one row of plan text.
 * @throws SQLException
 */
@Test public void testExplainStmt() throws SQLException {
  Statement stmt = con.createStatement();
  ResultSet res = stmt.executeQuery("explain select c1, c2, c3, c4, c5 as a, c6, c7, c8, c9, c10, c11, c12, " + "c1*2, sentences(null, null, null) as b, c23 from " + dataTypeTableName + " limit 1");
  ResultSetMetaData md = res.getMetaData();
  // Expected value first (JUnit convention) so failures read correctly.
  assertEquals(1, md.getColumnCount());
  assertEquals(EXPL_COLUMN_NAME, md.getColumnLabel(1));
  assertTrue("Nothing returned explain", res.next());
  res.close();
  stmt.close();
}
APIUtilityVerifier EqualityVerifier
/**
 * Validate the Metadata for the result set of a metadata getColumns call.
 */
@Test public void testMetaDataGetColumnsMetaData() throws SQLException {
  ResultSet rs = con.getMetaData().getColumns(null, null, "testhivejdbcdriver\\_table", null);
  ResultSetMetaData metaData = rs.getMetaData();
  // Column 1 is TABLE_CAT, an unbounded VARCHAR.
  assertEquals("TABLE_CAT", metaData.getColumnName(1));
  assertEquals(Types.VARCHAR, metaData.getColumnType(1));
  assertEquals(Integer.MAX_VALUE, metaData.getColumnDisplaySize(1));
  // Column 17 is ORDINAL_POSITION, an INTEGER.
  assertEquals("ORDINAL_POSITION", metaData.getColumnName(17));
  assertEquals(Types.INTEGER, metaData.getColumnType(17));
  assertEquals(11, metaData.getColumnDisplaySize(17));
}
APIUtilityVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * DESCRIBE on the test table must return the two columns with their types,
 * in declaration order.
 * @throws SQLException
 */
@Test public void testDescribeTable() throws SQLException {
  Statement stmt = con.createStatement();
  assertNotNull("Statement is null", stmt);
  ResultSet res = stmt.executeQuery("describe " + tableName);
  // Assert the cursor actually advanced instead of ignoring next()'s
  // return value and reading an invalid row.
  assertTrue("No first result row", res.next());
  assertEquals("Column name 'under_col' not found", "under_col", res.getString(1));
  assertEquals("Column type 'under_col' for column under_col not found", "int", res.getString(2));
  assertTrue("No second result row", res.next());
  assertEquals("Column name 'value' not found", "value", res.getString(1));
  assertEquals("Column type 'string' for column key not found", "string", res.getString(2));
  assertFalse("More results found than expected", res.next());
  res.close();
  stmt.close();
}
APIUtilityVerifier IterativeVerifier BranchVerifier EqualityVerifier
/**
 * Exercises DatabaseMetaData.getColumns with various table/column name
 * patterns (SQL wildcards % and _, plus escaped literals), checking both
 * the result metadata and the number of rows returned per pattern.
 * @throws SQLException
 */
@Test public void testMetaDataGetColumns() throws SQLException {
  // {tableNamePattern, columnNamePattern} -> expected matching row count.
  // Typed generics instead of raw Map/HashMap.
  Map<String[], Integer> tests = new HashMap<String[], Integer>();
  tests.put(new String[]{"testhivejdbcdriver\\_table", null}, 2);
  tests.put(new String[]{"testhivejdbc%", null}, 7);
  tests.put(new String[]{"testhiveJDBC%", null}, 7);
  tests.put(new String[]{"%jdbcdriver\\_table", null}, 2);
  tests.put(new String[]{"%jdbcdriver\\_table%", "under\\_col"}, 1);
  tests.put(new String[]{"%jdbcdriver\\_table%", "under\\_co_"}, 1);
  tests.put(new String[]{"%jdbcdriver\\_table%", "under_col"}, 1);
  tests.put(new String[]{"%jdbcdriver\\_table%", "und%"}, 1);
  tests.put(new String[]{"%jdbcdriver\\_table%", "%"}, 2);
  tests.put(new String[]{"%jdbcdriver\\_table%", "_%"}, 2);
  for (String[] checkPattern : tests.keySet()) {
    ResultSet rs = con.getMetaData().getColumns(null, null, checkPattern[0], checkPattern[1]);
    ResultSetMetaData rsmd = rs.getMetaData();
    assertEquals("TABLE_CAT", rsmd.getColumnName(1));
    int cnt = 0;
    while (rs.next()) {
      String columnname = rs.getString("COLUMN_NAME");
      int ordinalPos = rs.getInt("ORDINAL_POSITION");
      switch (cnt) {
        case 0:
          // Expected value first (JUnit convention).
          assertEquals("Wrong column name found", "under_col", columnname);
          assertEquals("Wrong ordinal position found", 1, ordinalPos);
          break;
        case 1:
          assertEquals("Wrong column name found", "value", columnname);
          assertEquals("Wrong ordinal position found", 2, ordinalPos);
          break;
        default:
          break;
      }
      cnt++;
    }
    rs.close();
    assertEquals("Found fewer columns than we test for.", tests.get(checkPattern).intValue(), cnt);
  }
}
APIUtilityVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * test getProcedures(): Hive exposes no stored procedures, so the call
 * must return an empty result set with the JDBC-defined 9-column layout.
 * @throws SQLException
 */
@Test public void testProccedures() throws SQLException {
DatabaseMetaData dbmd=con.getMetaData();
assertNotNull(dbmd);
ResultSet res=dbmd.getProcedures(null,null,null);
ResultSetMetaData md=res.getMetaData();
// JDBC specifies 9 columns for the getProcedures result set.
assertEquals(md.getColumnCount(),9);
assertFalse(res.next());
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * getCatalogs: Hive reports no catalogs, so the result set must be empty
 * while still exposing the single TABLE_CAT column.
 */
@Test public void testMetaDataGetCatalogs() throws SQLException {
  ResultSet catalogs = con.getMetaData().getCatalogs();
  ResultSetMetaData catalogsMeta = catalogs.getMetaData();
  assertEquals(1, catalogsMeta.getColumnCount());
  assertEquals("TABLE_CAT", catalogsMeta.getColumnName(1));
  assertFalse(catalogs.next());
}
APIUtilityVerifier UtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * PreparedStatement.setDate must bind a DATE parameter so that the row
 * whose c20 equals 2013-01-01 is returned.
 * @throws Exception
 */
@Test public void testPrepareSetDate() throws Exception {
  String sql = "select * from " + dataTypeTableName + " where c20 = ?";
  // try-with-resources closes the statement even on assertion failure;
  // the method already throws Exception, so the old catch/printStackTrace/
  // fail wrapper was redundant and only obscured the real stack trace.
  try (PreparedStatement ps = con.prepareStatement(sql)) {
    java.sql.Date dtValue = java.sql.Date.valueOf("2013-01-01");
    ps.setDate(1, dtValue);
    ResultSet res = ps.executeQuery();
    assertTrue(res.next());
    assertEquals("2013-01-01", res.getString(20));
  }
}
APIUtilityVerifier IterativeVerifier BooleanVerifier EqualityVerifier NullVerifier PublicFieldVerifier HybridVerifier
/**
 * Reads three rows from the data-type table (ordered by c1) and checks
 * every column through the typed ResultSet getters:
 * row 1 — all columns NULL (primitive getters return their zero values);
 * row 2 — c1 = -1 with empty/degenerate complex values;
 * row 3 — c1 = 1 with fully populated values, plus binary-stream and
 * case-sensitivity checks.
 * @throws Exception
 */
@Test public void testDataTypes() throws Exception {
Statement stmt=con.createStatement();
ResultSet res=stmt.executeQuery("select * from " + dataTypeTableName + " order by c1");
ResultSetMetaData meta=res.getMetaData();
// Row 1: the all-NULL row sorts first.
assertTrue(res.next());
// NOTE(review): the loop bound uses '<', so the last column is not
// checked via getObject — confirm whether that is intentional.
for (int i=1; i < meta.getColumnCount(); i++) {
assertNull("Column " + i + " should be null",res.getObject(i));
}
// Primitive getters map SQL NULL to 0/false; object getters return null.
assertEquals(0,res.getInt(1));
assertEquals(false,res.getBoolean(2));
assertEquals(0d,res.getDouble(3),floatCompareDelta);
assertEquals(null,res.getString(4));
assertEquals(null,res.getString(5));
assertEquals(null,res.getString(6));
assertEquals(null,res.getString(7));
assertEquals(null,res.getString(8));
assertEquals(0,res.getByte(9));
assertEquals(0,res.getShort(10));
assertEquals(0f,res.getFloat(11),floatCompareDelta);
assertEquals(0L,res.getLong(12));
assertEquals(null,res.getString(13));
assertEquals(null,res.getString(14));
assertEquals(null,res.getString(15));
assertEquals(null,res.getString(16));
assertEquals(null,res.getString(17));
assertEquals(null,res.getString(18));
assertEquals(null,res.getString(19));
assertEquals(null,res.getString(20));
assertEquals(null,res.getDate(20));
assertEquals(null,res.getString(21));
assertEquals(null,res.getString(22));
// Row 2: negative primitives and empty complex values.
assertTrue(res.next());
assertEquals(-1,res.getInt(1));
assertEquals(false,res.getBoolean(2));
assertEquals(-1.1d,res.getDouble(3),floatCompareDelta);
assertEquals("",res.getString(4));
assertEquals("[]",res.getString(5));
assertEquals("{}",res.getString(6));
assertEquals("{}",res.getString(7));
assertEquals("{\"r\":null,\"s\":null,\"t\":null}",res.getString(8));
assertEquals(-1,res.getByte(9));
assertEquals(-1,res.getShort(10));
assertEquals(-1.0f,res.getFloat(11),floatCompareDelta);
assertEquals(-1,res.getLong(12));
assertEquals("[]",res.getString(13));
assertEquals("{}",res.getString(14));
assertEquals("{\"r\":null,\"s\":null}",res.getString(15));
assertEquals("[]",res.getString(16));
assertEquals(null,res.getString(17));
assertEquals(null,res.getTimestamp(17));
assertEquals(null,res.getBigDecimal(18));
assertEquals(null,res.getString(19));
assertEquals(null,res.getString(20));
assertEquals(null,res.getDate(20));
assertEquals(null,res.getString(21));
assertEquals(null,res.getString(22));
assertEquals(null,res.getString(23));
// Row 3: fully populated values for every column type.
assertTrue(res.next());
assertEquals(1,res.getInt(1));
assertEquals(true,res.getBoolean(2));
assertEquals(1.1d,res.getDouble(3),floatCompareDelta);
assertEquals("1",res.getString(4));
assertEquals("[1,2]",res.getString(5));
assertEquals("{1:\"x\",2:\"y\"}",res.getString(6));
assertEquals("{\"k\":\"v\"}",res.getString(7));
assertEquals("{\"r\":\"a\",\"s\":9,\"t\":2.2}",res.getString(8));
assertEquals(1,res.getByte(9));
assertEquals(1,res.getShort(10));
assertEquals(1.0f,res.getFloat(11),floatCompareDelta);
assertEquals(1,res.getLong(12));
assertEquals("[[\"a\",\"b\"],[\"c\",\"d\"]]",res.getString(13));
assertEquals("{1:{11:12,13:14},2:{21:22}}",res.getString(14));
assertEquals("{\"r\":1,\"s\":{\"a\":2,\"b\":\"x\"}}",res.getString(15));
assertEquals("[{\"m\":{},\"n\":1},{\"m\":{\"a\":\"b\",\"c\":\"d\"},\"n\":2}]",res.getString(16));
assertEquals("2012-04-22 09:00:00.123456789",res.getString(17));
assertEquals("2012-04-22 09:00:00.123456789",res.getTimestamp(17).toString());
assertEquals("123456789.0123456",res.getBigDecimal(18).toString());
assertEquals("abcd",res.getString(19));
assertEquals("2013-01-01",res.getString(20));
assertEquals("2013-01-01",res.getDate(20).toString());
assertEquals("abc123",res.getString(21));
// char(15) values are padded to full width.
assertEquals("abc123         ",res.getString(22));
// Binary column read through a stream and compared byte-by-byte.
// NOTE(review): the return value of read() is ignored — a short read
// would silently compare stale buffer bytes; confirm acceptable here.
byte[] bytes="X'01FF'".getBytes("UTF-8");
InputStream resultSetInputStream=res.getBinaryStream(23);
int len=bytes.length;
byte[] b=new byte[len];
resultSetInputStream.read(b,0,len);
for (int i=0; i < len; i++) {
assertEquals(bytes[i],b[i]);
}
// Numeric/string truthiness conversions and case-sensitivity flags.
assertEquals(true,res.getBoolean(1));
assertEquals(true,res.getBoolean(4));
assertFalse(meta.isCaseSensitive(1));
assertFalse(meta.isCaseSensitive(2));
assertFalse(meta.isCaseSensitive(3));
assertTrue(meta.isCaseSensitive(4));
assertFalse(res.next());
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * When two projected columns share the alias "a", getInt("a") must resolve
 * to the first matching column.
 */
@Test public void testDuplicateColumnNameOrder() throws SQLException {
  Statement statement = con.createStatement();
  ResultSet rs = statement.executeQuery("SELECT 1 AS a, 2 AS a from " + tableName);
  assertTrue(rs.next());
  assertEquals(1, rs.getInt("a"));
  rs.close();
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Verify selecting named expression columns: aliases must appear as
 * column labels and the computed values must round-trip.
 * @throws SQLException
 */
@Test public void testExprCol() throws SQLException {
  Statement stmt = con.createStatement();
  ResultSet res = stmt.executeQuery("select c1+1 as col1, length(c4) as len from " + dataTypeTableName + " where c1=1");
  ResultSetMetaData md = res.getMetaData();
  // Expected value first (JUnit convention) so failures read correctly.
  assertEquals(2, md.getColumnCount());
  assertEquals("col1", md.getColumnLabel(1));
  assertEquals("len", md.getColumnLabel(2));
  assertTrue(res.next());
  assertEquals(2, res.getInt(1));
  assertEquals(1, res.getInt(2));
  res.close();
  stmt.close();
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * getSchemas must expose the TABLE_SCHEM/TABLE_CATALOG columns and list
 * exactly one schema: "default".
 */
@Test public void testMetaDataGetSchemas() throws SQLException {
  ResultSet schemas = con.getMetaData().getSchemas();
  ResultSetMetaData schemasMeta = schemas.getMetaData();
  assertEquals(2, schemasMeta.getColumnCount());
  assertEquals("TABLE_SCHEM", schemasMeta.getColumnName(1));
  assertEquals("TABLE_CATALOG", schemasMeta.getColumnName(2));
  assertTrue(schemas.next());
  assertEquals("default", schemas.getString(1));
  assertFalse(schemas.next());
  schemas.close();
}
APIUtilityVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * test getPrimaryKeys(): Hive declares no primary keys here, so the call
 * must return an empty result set with the JDBC-defined 6-column layout.
 * @throws SQLException
 */
@Test public void testPrimaryKeys() throws SQLException {
DatabaseMetaData dbmd=con.getMetaData();
assertNotNull(dbmd);
ResultSet res=dbmd.getPrimaryKeys(null,null,null);
ResultSetMetaData md=res.getMetaData();
// JDBC specifies 6 columns for the getPrimaryKeys result set.
assertEquals(md.getColumnCount(),6);
assertFalse(res.next());
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Column labels for a projection with a colliding alias: "c2_1" (alias
 * shadowing another column's name), the plain "c2", and the generated
 * "_c2" label for the computed expression c1*2.
 * @throws SQLException
 */
@Test public void testResultSetMetaDataDuplicateColumnNames() throws SQLException {
Statement stmt=con.createStatement();
ResultSet res=stmt.executeQuery("select c1 as c2_1, c2, c1*2 from " + dataTypeTableName + " limit 1");
ResultSetMetaData meta=res.getMetaData();
ResultSet colRS=con.getMetaData().getColumns(null,null,dataTypeTableName.toLowerCase(),null);
assertEquals(3,meta.getColumnCount());
// NOTE(review): colRS is advanced but its rows are never read —
// presumably only asserting it has at least three rows; confirm intent.
assertTrue(colRS.next());
assertEquals("c2_1",meta.getColumnName(1));
assertTrue(colRS.next());
assertEquals("c2",meta.getColumnName(2));
assertTrue(colRS.next());
assertEquals("_c2",meta.getColumnName(3));
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * validate schema generated by "set" command
 * @throws SQLException
 */
@Test public void testSetCommand() throws SQLException {
  Statement stmt = con.createStatement();
  ResultSet rs = stmt.executeQuery("set -v");
  ResultSetMetaData metaData = rs.getMetaData();
  assertEquals(1, metaData.getColumnCount());
  assertEquals(SET_COLUMN_NAME, metaData.getColumnLabel(1));
  assertTrue("Nothing returned by set -v", rs.next());
  rs.close();
  stmt.close();
}
APIUtilityVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Grants SELECT on the data-type table to hive_test_user and verifies the
 * single row returned by SHOW GRANT for that user/table.
 * @throws SQLException
 */
@Test public void testShowGrant() throws SQLException {
Statement stmt=con.createStatement();
stmt.execute("grant select on table " + dataTypeTableName + " to user hive_test_user");
stmt.execute("show grant user hive_test_user on table " + dataTypeTableName);
ResultSet res=stmt.getResultSet();
assertTrue(res.next());
// Row layout (presumably: database, table, partition, column, principal
// name, principal type, privilege, grant option, grant time, grantor —
// verify against the SHOW GRANT output schema).
assertEquals("default",res.getString(1));
assertEquals(dataTypeTableName,res.getString(2));
assertEquals("",res.getString(3));
assertEquals("",res.getString(4));
assertEquals("hive_test_user",res.getString(5));
assertEquals("USER",res.getString(6));
assertEquals("SELECT",res.getString(7));
assertEquals(false,res.getBoolean(8));
assertEquals(-1,res.getLong(9));
assertNotNull(res.getString(10));
assertFalse(res.next());
res.close();
}
APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Binding a TIMESTAMP parameter via setTimestamp must match exactly the
 * row whose c17 equals the given nanosecond-precision timestamp.
 */
@Test public void testPrepareSetTimestamp() throws SQLException, ParseException {
  String sql = String.format("SELECT * FROM %s WHERE c17 = ?", dataTypeTableName);
  try (PreparedStatement ps = con.prepareStatement(sql)) {
    ps.setTimestamp(1, Timestamp.valueOf("2012-04-22 09:00:00.123456789"));
    try (ResultSet rs = ps.executeQuery()) {
      assertTrue(rs.next());
      assertEquals(1, rs.getInt(1));
      assertFalse(rs.next());
    }
  }
}
UtilityVerifier EqualityVerifier HybridVerifier
/**
 * Invalid queries must surface the proper SQLSTATE: 42000 for syntax
 * errors, 42S02 for a missing table.
 * @throws SQLException
 */
@Test public void testErrorDiag() throws SQLException {
  Statement stmt = con.createStatement();
  // Syntax error: missing select list.
  assertSqlState(stmt, "select from " + dataTypeTableName, "42000");
  // Semantic error: table does not exist.
  assertSqlState(stmt, "select * from nonTable", "42S02");
  // Syntax/semantic error: unknown column.
  assertSqlState(stmt, "select zzzz from " + dataTypeTableName, "42000");
}

/** Runs a query expected to fail and asserts the resulting SQLSTATE. */
private void assertSqlState(Statement stmt, String sql, String expectedState) {
  try {
    stmt.executeQuery(sql);
    fail("SQLException is expected");
  }
  catch (SQLException e) {
    assertEquals(expectedState, e.getSQLState());
  }
}
UtilityVerifier EqualityVerifier HybridVerifier
/**
 * Negative Test for cursor repositioning to start of resultset
 * Verify unsupported JDBC resultset attributes
 * @throws Exception
 */
@Test public void testUnsupportedFetchTypes() throws Exception {
// Both unsupported statement configurations must fail with SQLSTATE
// HYC00 ("optional feature not implemented").
try {
con.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE,ResultSet.CONCUR_READ_ONLY);
fail("createStatement with TYPE_SCROLL_SENSITIVE should fail");
}
catch ( SQLException e) {
assertEquals("HYC00",e.getSQLState().trim());
}
try {
con.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,ResultSet.CONCUR_UPDATABLE);
fail("createStatement with CONCUR_UPDATABLE should fail");
}
catch ( SQLException e) {
assertEquals("HYC00",e.getSQLState().trim());
}
}
APIUtilityVerifier BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * test getImportedKeys()
 * @throws SQLException
 */
@Test
public void testImportedKeys() throws SQLException {
  DatabaseMetaData dbmd = con.getMetaData();
  assertNotNull(dbmd);
  ResultSet res = dbmd.getImportedKeys(null, null, null);
  ResultSetMetaData md = res.getMetaData();
  // The JDBC spec defines 14 columns for getImportedKeys results.
  // Fixed: assertEquals takes the EXPECTED value first; the original had the
  // arguments swapped, producing misleading failure messages.
  assertEquals(14, md.getColumnCount());
  // Hive reports no imported (foreign) keys, so the result set is empty.
  assertFalse(res.next());
}
Class: org.apache.hive.jdbc.miniHS2.TestHiveServer2 InternalCallVerifier EqualityVerifier
/**
 * Open a new session and execute a set command
 * @throws Exception
 */
@Test
public void testGetVariableValue() throws Exception {
  CLIServiceClient client = miniHS2.getServiceClient();
  SessionHandle session = client.openSession("foo", "bar");
  // "set <name>" echoes the variable's value back as a single result row.
  OperationHandle op = client.executeStatement(session, "set system:os.name", confOverlay);
  RowSet rows = client.fetchResults(op);
  assertEquals(1, rows.numRows());
  client.closeSession(session);
}
Class: org.apache.hive.minikdc.TestHiveAuthFactory InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Verify that delegation token manager is started with no exception for MemoryTokenStore
 * @throws Exception
 */
@Test
public void testStartTokenManagerForMemoryTokenStore() throws Exception {
  hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION,
      HiveAuthFactory.AuthTypes.KERBEROS.getAuthName());
  String principal = miniHiveKdc.getFullHiveServicePrincipal();
  System.out.println("Principal: " + principal);
  hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL, principal);
  String keytab = miniHiveKdc.getKeyTabFile(miniHiveKdc.getHiveServicePrincipal());
  System.out.println("keyTabFile: " + keytab);
  Assert.assertNotNull(keytab);
  hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB, keytab);
  // Constructing the factory starts the delegation token manager; it must not throw.
  HiveAuthFactory authFactory = new HiveAuthFactory(hiveConf);
  Assert.assertNotNull(authFactory);
  // Kerberos auth must yield the TUGI-assuming thrift transport factory.
  Assert.assertEquals(
      "org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge$Server$TUGIAssumingTransportFactory",
      authFactory.getAuthTransFactory().getClass().getName());
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Verify that delegation token manager is started with no exception for DBTokenStore
 * @throws Exception
 */
@Test
public void testStartTokenManagerForDBTokenStore() throws Exception {
  hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION,
      HiveAuthFactory.AuthTypes.KERBEROS.getAuthName());
  String principal = miniHiveKdc.getFullHiveServicePrincipal();
  System.out.println("Principal: " + principal);
  hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL, principal);
  String keytab = miniHiveKdc.getKeyTabFile(miniHiveKdc.getHiveServicePrincipal());
  System.out.println("keyTabFile: " + keytab);
  Assert.assertNotNull(keytab);
  hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB, keytab);
  // Same as the MemoryTokenStore case, but with the DB-backed token store configured.
  hiveConf.setVar(ConfVars.METASTORE_CLUSTER_DELEGATION_TOKEN_STORE_CLS,
      "org.apache.hadoop.hive.thrift.DBTokenStore");
  HiveAuthFactory authFactory = new HiveAuthFactory(hiveConf);
  Assert.assertNotNull(authFactory);
  Assert.assertEquals(
      "org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge$Server$TUGIAssumingTransportFactory",
      authFactory.getAuthTransFactory().getClass().getName());
}
Class: org.apache.hive.minikdc.TestHs2HooksWithMiniKdc BooleanVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Test that hook context properties are correctly set.
 */
@Test
public void testHookContexts() throws Throwable {
  miniHiveKdc.loginUser(MiniHiveKdc.HIVE_TEST_USER_1);
  hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL());
  Statement stmt = hs2Conn.createStatement();
  stmt.executeQuery("show databases");
  stmt.executeQuery("show tables");
  // Rethrow any failure captured inside the exec hooks themselves.
  Throwable error = PostExecHook.error;
  if (error != null) {
    throw error;
  }
  error = PreExecHook.error;
  if (error != null) {
    throw error;
  }
  // Fixed: JUnit's Assert.assertNotNull(message, object) takes the message FIRST.
  // The original passed (object, message), which only asserted that the message
  // string literal was non-null -- a check that could never fail.
  Assert.assertNotNull("ipaddress is null", PostExecHook.ipAddress);
  Assert.assertNotNull("userName is null", PostExecHook.userName);
  Assert.assertNotNull("operation is null", PostExecHook.operation);
  Assert.assertEquals(MiniHiveKdc.HIVE_TEST_USER_1, PostExecHook.userName);
  Assert.assertTrue(PostExecHook.ipAddress, PostExecHook.ipAddress.contains("127.0.0.1"));
  Assert.assertEquals("SHOWTABLES", PostExecHook.operation);
  Assert.assertNotNull("ipaddress is null", PreExecHook.ipAddress);
  Assert.assertNotNull("userName is null", PreExecHook.userName);
  Assert.assertNotNull("operation is null", PreExecHook.operation);
  Assert.assertEquals(MiniHiveKdc.HIVE_TEST_USER_1, PreExecHook.userName);
  Assert.assertTrue(PreExecHook.ipAddress, PreExecHook.ipAddress.contains("127.0.0.1"));
  Assert.assertEquals("SHOWTABLES", PreExecHook.operation);
  // Rethrow any failure captured inside the semantic analysis hooks.
  error = SemanticAnalysisHook.preAnalyzeError;
  if (error != null) {
    throw error;
  }
  error = SemanticAnalysisHook.postAnalyzeError;
  if (error != null) {
    throw error;
  }
  Assert.assertNotNull("semantic hook context ipaddress is null",
      SemanticAnalysisHook.ipAddress);
  Assert.assertNotNull("semantic hook context userName is null",
      SemanticAnalysisHook.userName);
  Assert.assertNotNull("semantic hook context command is null",
      SemanticAnalysisHook.command);
  Assert.assertTrue(SemanticAnalysisHook.ipAddress,
      SemanticAnalysisHook.ipAddress.contains("127.0.0.1"));
  Assert.assertEquals("show tables", SemanticAnalysisHook.command);
}
Class: org.apache.hive.minikdc.TestJdbcWithMiniKdc UtilityVerifier EqualityVerifier HybridVerifier
/**
 * Negative test, verify that connection to secure HS2 fails when
 * required connection attributes are not provided
 * @throws Exception
 */
@Test
public void testConnectionNeg() throws Exception {
  miniHiveKdc.loginUser(MiniHiveKdc.HIVE_TEST_USER_1);
  // Strip the ";principal=..." attribute so the URL lacks Kerberos info.
  String strippedUrl = miniHS2.getJdbcURL().replaceAll(";principal.*", "");
  try {
    hs2Conn = DriverManager.getConnection(strippedUrl);
    fail("NON kerberos connection should fail");
  } catch (SQLException e) {
    // 08S01 = communication link failure.
    assertEquals("08S01", e.getSQLState().trim());
  }
}
Class: org.apache.hive.minikdc.TestMiniHiveKdc BooleanVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/** Verifies that a keytab-based login as the Hive service principal succeeds. */
@Test
public void testLogin() throws Exception {
  String servicePrincipal = miniHiveKdc.getHiveServicePrincipal();
  assertNotNull(servicePrincipal);
  miniHiveKdc.loginUser(servicePrincipal);
  // Login must be keytab-based, not ticket-cache based.
  assertTrue(UserGroupInformation.isLoginKeytabBased());
  UserGroupInformation ugi = Utils.getUGI();
  assertEquals(MiniHiveKdc.HIVE_SERVICE_PRINCIPAL, ugi.getShortUserName());
}
Class: org.apache.hive.ptest.execution.TestExecutionPhase EqualityVerifier
/** A passing unit test batch: the test is recorded as executed, none as failed. */
@Test
public void testPassingUnitTest() throws Throwable {
  setupUnitTest();
  String batchName = testBatch.getName();
  copyTestOutput("SomeTest-success.xml", succeededLogDir, batchName);
  getPhase().execute();
  Approvals.verify(getExecutedCommands());
  Assert.assertEquals(Sets.newHashSet("SomeTest." + QFILENAME), executedTests);
  Assert.assertEquals(Sets.newHashSet(), failedTests);
}
EqualityVerifier
/** A failing unit test batch: the test appears in both executed and failed sets. */
@Test
public void testFailingUnitTest() throws Throwable {
  setupUnitTest();
  // Make the batch's driver script exit non-zero.
  String script = "bash " + LOCAL_DIR + "/" + HOST + "-" + USER + "-0/scratch/hiveptest-"
      + DRIVER + ".sh";
  sshCommandExecutor.putFailure(script, 1);
  copyTestOutput("SomeTest-failure.xml", failedLogDir, testBatch.getName());
  getPhase().execute();
  Approvals.verify(getExecutedCommands());
  Assert.assertEquals(Sets.newHashSet("SomeTest." + QFILENAME), executedTests);
  Assert.assertEquals(Sets.newHashSet("SomeTest." + QFILENAME), failedTests);
}
EqualityVerifier
/** A passing qfile batch: the test is recorded as executed, none as failed. */
@Test
public void testPassingQFileTest() throws Throwable {
  setupQFile(true);
  String batchName = testBatch.getName();
  copyTestOutput("SomeTest-success.xml", succeededLogDir, batchName);
  getPhase().execute();
  Approvals.verify(getExecutedCommands());
  Assert.assertEquals(Sets.newHashSet("SomeTest." + QFILENAME), executedTests);
  Assert.assertEquals(Sets.newHashSet(), failedTests);
}
EqualityVerifier
/** A failing qfile batch: the test appears in both executed and failed sets. */
@Test
public void testFailingQFile() throws Throwable {
  setupQFile(true);
  // Make the qfile driver script exit non-zero.
  String script = "bash " + LOCAL_DIR + "/" + HOST + "-" + USER + "-0/scratch/hiveptest-"
      + DRIVER + "-" + QFILENAME + ".sh";
  sshCommandExecutor.putFailure(script, 1);
  copyTestOutput("SomeTest-failure.xml", failedLogDir, testBatch.getName());
  getPhase().execute();
  Approvals.verify(getExecutedCommands());
  Assert.assertEquals(Sets.newHashSet("SomeTest." + QFILENAME), executedTests);
  Assert.assertEquals(Sets.newHashSet("SomeTest." + QFILENAME), failedTests);
}
Class: org.apache.hive.ptest.execution.TestHostExecutor BooleanVerifier EqualityVerifier HybridVerifier
/** An exec failure on a parallel batch still drains the parallel work queue. */
@Test
public void testParallelFailsOnExec() throws Exception {
  sshCommandExecutor.putFailure(
      "bash /some/local/dir/somehost-someuser-0/scratch/hiveptest-driver-parallel-1.sh",
      Constants.EXIT_CODE_UNKNOWN);
  HostExecutor executor = createHostExecutor();
  parallelWorkQueue.addAll(Lists.newArrayList(testBatchParallel1));
  executor.submitTests(parallelWorkQueue, isolatedWorkQueue, failedTestResults).get();
  Assert.assertEquals(Collections.emptySet(), failedTestResults);
  Assert.assertTrue(parallelWorkQueue.toString(), parallelWorkQueue.isEmpty());
  Approvals.verify(getExecutedCommands());
}
EqualityVerifier
/** Happy path: two parallel and two isolated batches run with no failures. */
@Test
public void testBasic() throws Exception {
  HostExecutor executor = createHostExecutor();
  parallelWorkQueue.addAll(Lists.newArrayList(testBatchParallel1, testBatchParallel2));
  parallelWorkQueue.addAll(Lists.newArrayList(testBatchIsolated1, testBatchIsolated2));
  executor.submitTests(parallelWorkQueue, isolatedWorkQueue, failedTestResults).get();
  Assert.assertEquals(Collections.emptySet(), failedTestResults);
  Approvals.verify(getExecutedCommands());
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Shutting the executor down before submission leaves the queued batch in place
 * and executes nothing.
 */
@Test
public void testShutdownBeforeExec() throws Exception {
  rsyncCommandExecutor.putFailure(
      "/tmp/hive-ptest-units/TestHostExecutor/scratch/hiveptest-driver-parallel-1.sh "
          + "/some/local/dir/somehost-someuser-0/scratch/hiveptest-driver-parallel-1.sh",
      Constants.EXIT_CODE_UNKNOWN);
  HostExecutor executor = createHostExecutor();
  parallelWorkQueue.addAll(Lists.newArrayList(testBatchParallel1));
  executor.shutdownNow();
  executor.submitTests(parallelWorkQueue, isolatedWorkQueue, failedTestResults).get();
  Assert.assertEquals(Collections.emptySet(), failedTestResults);
  // The batch was never taken off the queue.
  Assert.assertEquals(parallelWorkQueue.toString(), 1, parallelWorkQueue.size());
  Approvals.verify("EMPTY\n" + getExecutedCommands());
  Assert.assertTrue(executor.isShutdown());
}
BooleanVerifier EqualityVerifier HybridVerifier
// Rsync of an isolated batch fails with exit code 1; no test failures are recorded.
@Test public void testIsolatedFailsOnRsyncOne() throws Exception {
rsyncCommandExecutor.putFailure("/tmp/hive-ptest-units/TestHostExecutor/scratch/hiveptest-driver-isolated-1.sh " + "/some/local/dir/somehost-someuser-0/scratch/hiveptest-driver-isolated-1.sh",1);
HostExecutor executor=createHostExecutor();
isolatedWorkQueue.addAll(Lists.newArrayList(testBatchIsolated1));
executor.submitTests(parallelWorkQueue,isolatedWorkQueue,failedTestResults).get();
Assert.assertEquals(Collections.emptySet(),failedTestResults);
// NOTE(review): the assertion message uses isolatedWorkQueue but the emptiness check
// is on parallelWorkQueue, which was never populated and is trivially empty. Possible
// copy-paste slip -- confirm whether isolatedWorkQueue.isEmpty() was intended, as in
// testIsolatedFailsOnRsyncUnknown below.
Assert.assertTrue(isolatedWorkQueue.toString(),parallelWorkQueue.isEmpty());
Approvals.verify(getExecutedCommands());
}
BooleanVerifier EqualityVerifier HybridVerifier
/** Rsync of an isolated batch fails with an unknown exit code; the queue drains. */
@Test
public void testIsolatedFailsOnRsyncUnknown() throws Exception {
  rsyncCommandExecutor.putFailure(
      "/tmp/hive-ptest-units/TestHostExecutor/scratch/hiveptest-driver-isolated-1.sh "
          + "/some/local/dir/somehost-someuser-0/scratch/hiveptest-driver-isolated-1.sh",
      Constants.EXIT_CODE_UNKNOWN);
  HostExecutor executor = createHostExecutor();
  isolatedWorkQueue.addAll(Lists.newArrayList(testBatchIsolated1));
  executor.submitTests(parallelWorkQueue, isolatedWorkQueue, failedTestResults).get();
  Assert.assertEquals(Collections.emptySet(), failedTestResults);
  Assert.assertTrue(isolatedWorkQueue.toString(), isolatedWorkQueue.isEmpty());
  Approvals.verify(getExecutedCommands());
}
BooleanVerifier EqualityVerifier HybridVerifier
/** Rsync of a parallel batch fails with an unknown exit code; the queue drains. */
@Test
public void testParallelFailsOnRsync() throws Exception {
  rsyncCommandExecutor.putFailure(
      "/tmp/hive-ptest-units/TestHostExecutor/scratch/hiveptest-driver-parallel-1.sh "
          + "/some/local/dir/somehost-someuser-0/scratch/hiveptest-driver-parallel-1.sh",
      Constants.EXIT_CODE_UNKNOWN);
  HostExecutor executor = createHostExecutor();
  parallelWorkQueue.addAll(Lists.newArrayList(testBatchParallel1));
  executor.submitTests(parallelWorkQueue, isolatedWorkQueue, failedTestResults).get();
  Assert.assertEquals(Collections.emptySet(), failedTestResults);
  Assert.assertTrue(parallelWorkQueue.toString(), parallelWorkQueue.isEmpty());
  Approvals.verify(getExecutedCommands());
}
BooleanVerifier EqualityVerifier HybridVerifier
// Exec of an isolated batch fails with an unknown exit code; no test failures recorded.
@Test public void testIsolatedFailsOnExec() throws Exception {
sshCommandExecutor.putFailure("bash /some/local/dir/somehost-someuser-0/scratch/hiveptest-driver-isolated-1.sh",Constants.EXIT_CODE_UNKNOWN);
HostExecutor executor=createHostExecutor();
isolatedWorkQueue.addAll(Lists.newArrayList(testBatchIsolated1));
executor.submitTests(parallelWorkQueue,isolatedWorkQueue,failedTestResults).get();
Assert.assertEquals(Collections.emptySet(),failedTestResults);
// NOTE(review): message uses isolatedWorkQueue but the emptiness check is on
// parallelWorkQueue, which was never populated -- trivially true. Confirm whether
// isolatedWorkQueue.isEmpty() was intended (cf. testIsolatedFailsOnRsyncUnknown).
Assert.assertTrue(isolatedWorkQueue.toString(),parallelWorkQueue.isEmpty());
Approvals.verify(getExecutedCommands());
}
Class: org.apache.hive.ptest.execution.TestJIRAService EqualityVerifier
/** Boundary cases for trimMessages: empty list and exactly MAX_MESSAGES entries. */
@Test
public void testTrimMesssagesBoundry() {
  // An empty list comes back unchanged.
  List msgs = Lists.newArrayList();
  Assert.assertEquals(msgs, JIRAService.trimMessages(msgs));
  msgs.clear();
  // Exactly MAX_MESSAGES entries is the boundary: still no trimming.
  for (int i = 0; i < JIRAService.MAX_MESSAGES; i++) {
    msgs.add(String.valueOf(i));
  }
  Assert.assertEquals(msgs, JIRAService.trimMessages(msgs));
}
EqualityVerifier
/** A short list (well under MAX_MESSAGES) is returned untrimmed. */
@Test
public void testTrimMesssagesNotTrimmed() {
  List shortList = Lists.newArrayList("a", "b", "c");
  Assert.assertEquals(shortList, JIRAService.trimMessages(shortList));
}
APIUtilityVerifier EqualityVerifier
/** One entry over MAX_MESSAGES: the oldest entry is replaced by TRIMMED_MESSAGE. */
@Test
public void testTrimMesssagesTrimmed() {
  List msgs = Lists.newArrayList();
  for (int i = 0; i < JIRAService.MAX_MESSAGES + 1; i++) {
    msgs.add(String.valueOf(i));
  }
  // Expected: the first entry is dropped and a trim marker is prepended.
  List expected = Lists.newArrayList(msgs);
  expected.remove(0);
  expected.add(0, JIRAService.TRIMMED_MESSAGE);
  Assert.assertEquals(expected, JIRAService.trimMessages(msgs));
}
InternalCallVerifier EqualityVerifier
/** formatBuildTag splits "name-number" into "name/number" plus the bare name. */
@Test
public void testFormatBuildTagPositive() throws Throwable {
  // Simple tag.
  BuildInfo simple = JIRAService.formatBuildTag("abc-123");
  Assert.assertEquals("abc/123", simple.getFormattedBuildTag());
  Assert.assertEquals("abc", simple.getBuildName());
  // Tag whose name itself contains dashes: only the trailing number is split off.
  BuildInfo dashed = JIRAService.formatBuildTag("PreCommit-HIVE-TRUNK-Build-1115");
  Assert.assertEquals("PreCommit-HIVE-TRUNK-Build/1115", dashed.getFormattedBuildTag());
  Assert.assertEquals("PreCommit-HIVE-TRUNK-Build", dashed.getBuildName());
}
Class: org.apache.hive.ptest.execution.TestLocalCommand InternalCallVerifier EqualityVerifier
// Runs "exit 1" through LocalCommandFactory and checks exit code and collected output.
@Test public void testFailure() throws Exception {
LocalCommand.CollectPolicy output=new LocalCommand.CollectPolicy();
LocalCommand command=(new LocalCommandFactory(LOG)).create(output,"exit 1");
// Called twice on purpose: the second call verifies getExitCode() is repeatable
// after the process has finished -- presumably cached; TODO confirm.
Assert.assertEquals(1,command.getExitCode());
Assert.assertEquals(1,command.getExitCode());
// "exit 1" writes nothing to stdout/stderr.
Assert.assertEquals("",output.getOutput());
}
InternalCallVerifier EqualityVerifier
// Runs "echo 123" through LocalCommandFactory and checks exit code and output.
@Test public void testSuccess() throws Exception {
LocalCommand.CollectPolicy output=new LocalCommand.CollectPolicy();
LocalCommand command=(new LocalCommandFactory(LOG)).create(output,"echo 123");
// Give the asynchronous output collector time to drain the process's stdout.
// NOTE(review): fixed sleeps are flaky on slow machines; a poll/await would be sturdier.
Thread.sleep(500L);
// Called twice on purpose: verifies getExitCode() is repeatable after completion.
Assert.assertEquals(0,command.getExitCode());
Assert.assertEquals(0,command.getExitCode());
Assert.assertEquals("123",Strings.nullToEmpty(output.getOutput()).trim());
}
Class: org.apache.hive.ptest.execution.TestPhase EqualityVerifier
/** An rsync failure with exit code 1 removes the failing drone from the host. */
@Test
public void testRsyncFromLocalToRemoteInstancesWithFailureOne() throws Throwable {
  rsyncCommandExecutor.putFailure("local remote", 1);
  phase = new Phase(hostExecutors, localCommandFactory, templateDefaults, logger) {
    @Override
    public void execute() throws Exception {
      rsyncFromLocalToRemoteInstances("local", "remote");
    }
  };
  phase.execute();
  Approvals.verify(getExecutedCommands());
  // One of the two drones is dropped after the failure.
  Assert.assertEquals(1, hostExecutor.remainingDrones());
}
EqualityVerifier
/** An exec failure with an unknown exit code removes the failing drone. */
@Test
public void testExecInstancesWithFailure() throws Throwable {
  sshCommandExecutor.putFailure("echo", Constants.EXIT_CODE_UNKNOWN);
  phase = new Phase(hostExecutors, localCommandFactory, templateDefaults, logger) {
    @Override
    public void execute() throws Exception {
      execInstances("echo");
    }
  };
  phase.execute();
  Approvals.verify(getExecutedCommands());
  Assert.assertEquals(1, hostExecutor.remainingDrones());
}
EqualityVerifier
/** A per-host exec failure with an unknown exit code removes the failing drone. */
@Test
public void testExecHostsWithFailure() throws Throwable {
  sshCommandExecutor.putFailure("echo", Constants.EXIT_CODE_UNKNOWN);
  phase = new Phase(hostExecutors, localCommandFactory, templateDefaults, logger) {
    @Override
    public void execute() throws Exception {
      execHosts("echo");
    }
  };
  phase.execute();
  Approvals.verify(getExecutedCommands());
  Assert.assertEquals(1, hostExecutor.remainingDrones());
}
EqualityVerifier
/** An rsync failure with an unknown exit code removes the failing drone. */
@Test
public void testRsyncFromLocalToRemoteInstancesWithFailureUnknown() throws Throwable {
  rsyncCommandExecutor.putFailure("local remote", Constants.EXIT_CODE_UNKNOWN);
  phase = new Phase(hostExecutors, localCommandFactory, templateDefaults, logger) {
    @Override
    public void execute() throws Exception {
      rsyncFromLocalToRemoteInstances("local", "remote");
    }
  };
  phase.execute();
  Approvals.verify(getExecutedCommands());
  Assert.assertEquals(1, hostExecutor.remainingDrones());
}
EqualityVerifier
/** execLocally runs exactly one local command with the given argument. */
@Test
public void testExecLocallySucceeds() throws Throwable {
  phase = new Phase(hostExecutors, localCommandFactory, templateDefaults, logger) {
    @Override
    public void execute() throws Exception {
      execLocally("local");
    }
  };
  phase.execute();
  List recorded = localCommandFactory.getCommands();
  Assert.assertEquals(1, recorded.size());
  Assert.assertEquals("local", recorded.get(0));
}
Class: org.apache.hive.ptest.execution.TestReportParser InternalCallVerifier EqualityVerifier
/**
 * Copies canned surefire reports into the parser's directory (XML files gain the
 * "TEST-" prefix) and checks the parsed failed/executed test sets.
 */
@Test
public void test() throws Exception {
  File reportDir = new File("src/test/resources/test-outputs");
  for (File file : reportDir.listFiles()) {
    if (!file.isFile()) {
      continue;
    }
    // Only XML reports get the surefire-style "TEST-" prefix.
    String targetName =
        file.getName().endsWith(".xml") ? "TEST-" + file.getName() : file.getName();
    Files.copy(file, new File(baseDir, targetName));
  }
  JUnitReportParser parser = new JUnitReportParser(LOG, baseDir);
  Assert.assertEquals(3, parser.getFailedTests().size());
  Assert.assertEquals(Sets.newHashSet("org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_skewjoin_union_remove_1","org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_union_remove_9","org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_skewjoin"),parser.getFailedTests());
  Assert.assertEquals(Sets.newHashSet("org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_shutdown","org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_binary_constant","org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_skewjoin_union_remove_1","org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_udf_regexp_extract","org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_index_auth","org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_auto_join17","org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_authorization_2","org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_load_dyn_part3","org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_index_bitmap2","org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_groupby_rollup1","org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_bucketcontext_3","org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_ppd_join","org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_rcfile_lazydecompress","org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_notable_alias1","org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_union_remove_9","org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_skewjoin","org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_multi_insert_gby"),parser.getExecutedTests());
}
Class: org.apache.hive.ptest.execution.TestTestCheckPhase BooleanVerifier EqualityVerifier HybridVerifier
/** A patch that adds three Java test files must report all three as added tests. */
@Test
public void testJavaTests() throws Exception {
  URL url = this.getClass().getResource("/HIVE-10761.6.patch");
  File patchFile = new File(url.getFile());
  Set addedTests = new HashSet();
  phase = new TestCheckPhase(hostExecutors, localCommandFactory, templateDefaults,
      patchFile, logger, addedTests);
  phase.execute();
  // Fixed: assertEquals takes the EXPECTED value first; the original had the
  // arguments swapped, producing misleading failure messages.
  Assert.assertEquals(3, addedTests.size());
  Assert.assertTrue(addedTests.contains("TestCodahaleMetrics.java"));
  Assert.assertTrue(addedTests.contains("TestMetaStoreMetrics.java"));
  Assert.assertTrue(addedTests.contains("TestLegacyMetrics.java"));
}
BooleanVerifier EqualityVerifier HybridVerifier
/** A patch that adds one .q file must report it as an added test. */
@Test
public void testQTests() throws Exception {
  URL url = this.getClass().getResource("/HIVE-11271.4.patch");
  File patchFile = new File(url.getFile());
  Set addedTests = new HashSet();
  phase = new TestCheckPhase(hostExecutors, localCommandFactory, templateDefaults,
      patchFile, logger, addedTests);
  phase.execute();
  // Fixed: expected value goes first in assertEquals (was swapped).
  Assert.assertEquals(1, addedTests.size());
  Assert.assertTrue(addedTests.contains("unionall_unbalancedppd.q"));
}
EqualityVerifier
/** A patch with no test changes must report zero added tests. */
@Test
public void testNoTests() throws Exception {
  URL url = this.getClass().getResource("/HIVE-9377.1.patch");
  File patchFile = new File(url.getFile());
  Set addedTests = new HashSet();
  phase = new TestCheckPhase(hostExecutors, localCommandFactory, templateDefaults,
      patchFile, logger, addedTests);
  phase.execute();
  // Fixed: expected value goes first in assertEquals (was swapped).
  Assert.assertEquals(0, addedTests.size());
}
EqualityVerifier
/** A patch that only removes a test must report zero added tests. */
@Test
public void testRemoveTest() throws Exception {
  URL url = this.getClass().getResource("/remove-test.patch");
  File patchFile = new File(url.getFile());
  Set addedTests = new HashSet();
  phase = new TestCheckPhase(hostExecutors, localCommandFactory, templateDefaults,
      patchFile, logger, addedTests);
  phase.execute();
  // Fixed: expected value goes first in assertEquals (was swapped).
  Assert.assertEquals(0, addedTests.size());
}
Class: org.apache.hive.ptest.execution.conf.TestQFileTestBatch BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/** A parallel QFileTestBatch exposes its driver, derived name, and test arguments. */
@Test
public void testParallel() throws Exception {
  QFileTestBatch batch = new QFileTestBatch("testcase", DRIVER, QUERY_FILES_PROPERTY, tests, true);
  Assert.assertTrue(batch.isParallel());
  Assert.assertEquals(DRIVER, batch.getDriver());
  // Name is driver plus the individual tests joined with dashes.
  String expectedName = Joiner.on("-").join(DRIVER, "a", "b", "c");
  Assert.assertEquals(expectedName, batch.getName());
  String expectedArgs = String.format("-Dtestcase=%s -D%s=a,b,c", DRIVER, QUERY_FILES_PROPERTY);
  Assert.assertEquals(expectedArgs, batch.getTestArguments());
}
BooleanVerifier EqualityVerifier HybridVerifier
/** With more than three tests, the batch name abbreviates to "...-and-N-more". */
@Test
public void testMoreThanThreeTests() throws Exception {
  Assert.assertTrue(tests.add("d"));
  QFileTestBatch batch = new QFileTestBatch("testcase", DRIVER, QUERY_FILES_PROPERTY, tests, true);
  String expectedName = Joiner.on("-").join(DRIVER, "a", "b", "c", "and", "1", "more");
  Assert.assertEquals(expectedName, batch.getName());
}
Class: org.apache.hive.ptest.execution.conf.TestTestConfiguration APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
// Loads test-configuration.properties through both ExecutionContextConfiguration and
// TestConfiguration, verifies every parsed getter, then exercises each setter.
@Test public void testGettersSetters() throws Exception {
ExecutionContextConfiguration execConf=ExecutionContextConfiguration.fromInputStream(Resources.getResource("test-configuration.properties").openStream());
TestConfiguration conf=TestConfiguration.fromInputStream(Resources.getResource("test-configuration.properties").openStream(),LOG);
Set expectedHosts=Sets.newHashSet(new Host("localhost","hiveptest",new String[]{"/home/hiveptest"},2));
ExecutionContext executionContext=execConf.getExecutionContextProvider().createExecutionContext();
// Values parsed from the properties file.
Assert.assertEquals(expectedHosts,executionContext.getHosts());
Assert.assertEquals("/tmp/hive-ptest-units/working/dir",execConf.getWorkingDirectory());
Assert.assertEquals("/etc/hiveptest/conf",execConf.getProfileDirectory());
Assert.assertEquals("/tmp/hive-ptest-units/working/dir/logs",execConf.getGlobalLogDirectory());
Assert.assertEquals("/home/brock/.ssh/id_rsa",executionContext.getPrivateKey());
Assert.assertEquals("git://github.com/apache/hive.git",conf.getRepository());
Assert.assertEquals("apache-github",conf.getRepositoryName());
Assert.assertEquals("trunk",conf.getBranch());
Assert.assertEquals("/tmp/hive-ptest-units/working/dir/working",executionContext.getLocalWorkingDirectory());
Assert.assertEquals("-Dtest.continue.on.failure=true -Dtest.silent=false",conf.getAntArgs());
Assert.assertEquals("hadoop-1,hadoop-2",conf.getAdditionalProfiles());
Assert.assertNotNull(conf.toString());
// Patch is unset in the file, so it defaults to the empty string.
Assert.assertEquals("",conf.getPatch());
// Each setter must be reflected by the matching getter.
conf.setPatch("Patch");
Assert.assertEquals("Patch",conf.getPatch());
conf.setRepository("Repository");
Assert.assertEquals("Repository",conf.getRepository());
conf.setRepositoryName("RepositoryName");
Assert.assertEquals("RepositoryName",conf.getRepositoryName());
conf.setBranch("Branch");
Assert.assertEquals("Branch",conf.getBranch());
conf.setAntArgs("AntArgs");
Assert.assertEquals("AntArgs",conf.getAntArgs());
}
APIUtilityVerifier EqualityVerifier
/** A Context's parameters survive a round trip through TestConfiguration. */
@Test
public void testContext() throws Exception {
  Properties props = new Properties();
  props.load(Resources.getResource("test-configuration.properties").openStream());
  Context context = new Context(Maps.fromProperties(props));
  TestConfiguration conf = new TestConfiguration(context, LOG);
  Assert.assertEquals(context.getParameters(), conf.getContext().getParameters());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/** Building a PTest propagates configuration values into its template defaults. */
@Test
public void testPTest() throws Exception {
  Host testHost = new Host("test", "test", new String[1], 1);
  Set testHosts = new HashSet();
  testHosts.add(testHost);
  TestConfiguration conf = TestConfiguration.fromInputStream(
      Resources.getResource("test-configuration.properties").openStream(), LOG);
  ExecutionContext execContext = new ExecutionContext(null, testHosts, "test", null);
  PTest.Builder builder = new PTest.Builder();
  PTest ptest = builder.build(conf, execContext, "1234", baseDir.newFolder(),
      null, null, null, null);
  Map defaults = ptest.getTemplateDefaults();
  Assert.assertEquals("git://github.com/apache/hive.git", defaults.get("repository"));
  Assert.assertEquals("apache-github", defaults.get("repositoryName"));
  Assert.assertEquals("trunk", defaults.get("branch"));
  Assert.assertEquals("-Dtest.continue.on.failure=true -Dtest.silent=false",
      defaults.get("antArgs"));
  Assert.assertEquals("hadoop-1,hadoop-2", defaults.get("additionalProfiles"));
}
Class: org.apache.hive.ptest.execution.conf.TestTestParser InternalCallVerifier EqualityVerifier
// Configures unit tests plus a qfile test group whose members come from a property
// file ("prop" prefix with ${...} lookups), then checks the resulting batch count.
@Test public void testParsePropertyFile() throws Exception {
context.put("unitTests.directories","build/1 build/2");
context.put("unitTests.include","TestA TestB");
context.put("unitTests.isolate","TestB");
context.put("qFileTests","f");
// Property file backing the ${...} group references below.
context.put("qFileTests.propertyFiles.prop","props" + File.separator + "normal.properties");
context.put("qFileTest.f.driver",DRIVER);
context.put("qFileTest.f.directory","qfiles");
context.put("qFileTest.f.include","included");
context.put("qFileTest.f.isolate","isolated");
context.put("qFileTest.f.exclude","excluded");
context.put("qFileTest.f.queryFilesProperty","qfile");
// Group values are resolved against the "prop" property file.
context.put("qFileTest.f.groups.included","prop.${normal.one.group} prop.${normal.two.group} prop.${isolated.group}");
context.put("qFileTest.f.groups.isolated","prop.${isolated.group}");
context.put("qFileTest.f.groups.excluded","prop.${excluded.group}");
testParser=new TestParser(context,"testcase",workingDirectory,LOG);
List testBatches=testParser.parse().get();
// Expected: unit-test batches plus qfile batches total 4.
Assert.assertEquals(4,testBatches.size());
}
InternalCallVerifier EqualityVerifier
// Same parse as above but driven by exclude lists instead of includes.
@Test public void testParseWithExcludes() throws Exception {
context.put("unitTests.directories","build/1 build/2");
context.put("unitTests.exclude","TestA");
context.put("unitTests.isolate","TestB");
context.put("qFileTests","f");
context.put("qFileTest.f.driver",DRIVER);
context.put("qFileTest.f.directory","qfiles");
context.put("qFileTest.f.exclude","excluded");
context.put("qFileTest.f.queryFilesProperty","qfile");
context.put("qFileTest.f.isolate","isolated");
context.put("qFileTest.f.groups.excluded","excluded.q");
context.put("qFileTest.f.groups.isolated","isolated.q");
testParser=new TestParser(context,"testcase",workingDirectory,LOG);
List testBatches=testParser.parse().get();
// Expected: unit-test batches plus qfile batches total 4.
Assert.assertEquals(4,testBatches.size());
}
InternalCallVerifier EqualityVerifier
// Same parse as above but driven by explicit include lists.
@Test public void testParseWithIncludes() throws Exception {
context.put("unitTests.directories","build/1 build/2");
context.put("unitTests.include","TestA TestB");
context.put("unitTests.isolate","TestB");
context.put("qFileTests","f");
context.put("qFileTest.f.driver",DRIVER);
context.put("qFileTest.f.directory","qfiles");
context.put("qFileTest.f.include","included");
context.put("qFileTest.f.isolate","isolated");
context.put("qFileTest.f.queryFilesProperty","qfile");
context.put("qFileTest.f.groups.included","included.q isolated.q");
context.put("qFileTest.f.groups.isolated","isolated.q");
testParser=new TestParser(context,"testcase",workingDirectory,LOG);
List testBatches=testParser.parse().get();
// Expected: unit-test batches plus qfile batches total 4.
Assert.assertEquals(4,testBatches.size());
}
Class: org.apache.hive.ptest.execution.ssh.TestRSyncCommandExecutor BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Shutdown triggered from inside getExitCode(): the rsync already ran, so the command
// reports EXIT_CODE_UNKNOWN but must NOT be killed.
@Test public void testShutdownDuringWaitFor() throws Exception {
LocalCommand localCommand=mock(LocalCommand.class);
localCommandFactory.setInstance(localCommand);
final RSyncCommandExecutor executor=new RSyncCommandExecutor(LOG,1,localCommandFactory);
Assert.assertFalse(executor.isShutdown());
// Simulate a shutdown racing with waitFor: the mock flips the executor to
// shut-down state at the moment the exit code is read.
when(localCommand.getExitCode()).thenAnswer(new Answer(){
@Override public Integer answer( InvocationOnMock invocation) throws Throwable {
executor.shutdownNow();
return Constants.EXIT_CODE_UNKNOWN;
}
}
);
RSyncCommand command=new RSyncCommand(executor,"privateKey","user","host",1,"local","remote",RSyncCommand.Type.FROM_LOCAL);
executor.execute(command);
Assert.assertTrue(executor.isShutdown());
Assert.assertEquals(Constants.EXIT_CODE_UNKNOWN,command.getExitCode());
if (command.getException() != null) {
throw new Exception("Unexpected exception during execution",command.getException());
}
// Unlike shutdown-before-execute, a shutdown mid-wait must not kill the process.
verify(localCommand,never()).kill();
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// Shutdown before execute: the in-flight local command must be killed and the
// RSyncCommand must report EXIT_CODE_UNKNOWN without raising an exception.
@Test public void testShutdownBeforeWaitFor() throws Exception {
LocalCommand localCommand=mock(LocalCommand.class);
localCommandFactory.setInstance(localCommand);
RSyncCommandExecutor executor=new RSyncCommandExecutor(LOG,1,localCommandFactory);
Assert.assertFalse(executor.isShutdown());
executor.shutdownNow();
RSyncCommand command=new RSyncCommand(executor,"privateKey","user","host",1,"local","remote",RSyncCommand.Type.FROM_LOCAL);
executor.execute(command);
Assert.assertTrue(executor.isShutdown());
Assert.assertEquals(Constants.EXIT_CODE_UNKNOWN,command.getExitCode());
if (command.getException() != null) {
throw new Exception("Unexpected exception during execution",command.getException());
}
// Executing after shutdown must kill the underlying process exactly once.
verify(localCommand,times(1)).kill();
}
Class: org.apache.hive.ptest.execution.ssh.TestSSHCommandExecutor BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// SSH variant of shutdown-before-execute: the local command must be killed and the
// SSHCommand must report EXIT_CODE_UNKNOWN without raising an exception.
@Test public void testShutdownBeforeWaitFor() throws Exception {
LocalCommand localCommand=mock(LocalCommand.class);
localCommandFactory.setInstance(localCommand);
SSHCommandExecutor executor=new SSHCommandExecutor(LOG,localCommandFactory,"-o StrictHostKeyChecking=no");
Assert.assertFalse(executor.isShutdown());
executor.shutdownNow();
SSHCommand command=new SSHCommand(executor,"privateKey","user","host",1,"whoami",true);
executor.execute(command);
Assert.assertTrue(executor.isShutdown());
Assert.assertEquals(Constants.EXIT_CODE_UNKNOWN,command.getExitCode());
if (command.getException() != null) {
throw new Exception("Unexpected exception during execution",command.getException());
}
// Executing after shutdown must kill the underlying process exactly once.
verify(localCommand,times(1)).kill();
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
// SSH variant of shutdown-during-wait: the command already ran, so it reports
// EXIT_CODE_UNKNOWN but must NOT be killed.
@Test public void testShutdownDuringWaitFor() throws Exception {
LocalCommand localCommand=mock(LocalCommand.class);
localCommandFactory.setInstance(localCommand);
final SSHCommandExecutor executor=new SSHCommandExecutor(LOG,localCommandFactory,"-o StrictHostKeyChecking=no");
Assert.assertFalse(executor.isShutdown());
// Simulate a shutdown racing with waitFor: the mock flips the executor to
// shut-down state at the moment the exit code is read.
when(localCommand.getExitCode()).thenAnswer(new Answer(){
@Override public Integer answer( InvocationOnMock invocation) throws Throwable {
executor.shutdownNow();
return Constants.EXIT_CODE_UNKNOWN;
}
}
);
SSHCommand command=new SSHCommand(executor,"privateKey","user","host",1,"whoami",true);
executor.execute(command);
Assert.assertTrue(executor.isShutdown());
Assert.assertEquals(Constants.EXIT_CODE_UNKNOWN,command.getExitCode());
if (command.getException() != null) {
throw new Exception("Unexpected exception during execution",command.getException());
}
// A shutdown detected mid-wait must not kill the already-finished process.
verify(localCommand,never()).kill();
}
Class: org.apache.hive.service.cli.CLIServiceTest APIUtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Test async execution of a well-formed and a malformed query with different long polling durations
 * - Test malformed query with default long polling timeout
 * - Test well-formed query with default long polling timeout
 * - Test well-formed query with long polling timeout set to 0
 * - Test well-formed query with long polling timeout set to 500 millis
 * - Test well-formed query cancellation
 * @throws Exception
 */
@Test public void testExecuteStatementAsync() throws Exception {
  Map confOverlay=new HashMap();
  String tableName="TEST_EXEC_ASYNC";
  String columnDefinitions="(ID STRING)";
  String queryString;
  SessionHandle sessionHandle=setupTestData(tableName,columnDefinitions,confOverlay);
  assertNotNull(sessionHandle);
  OperationState state=null;
  OperationHandle opHandle;
  OperationStatus opStatus=null;
  // Disable the concurrency/lock manager for this session.
  queryString="SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname + " = false";
  opHandle=client.executeStatement(sessionHandle,queryString,confOverlay);
  client.closeOperation(opHandle);
  long longPollingTimeout;
  longPollingTimeout=HiveConf.getTimeVar(new HiveConf(),HiveConf.ConfVars.HIVE_SERVER2_LONG_POLLING_TIMEOUT,TimeUnit.MILLISECONDS);
  // Malformed query: should end in ERROR state.
  queryString="SELECT NON_EXISTING_COLUMN FROM " + tableName;
  try {
    runAsyncAndWait(sessionHandle,queryString,confOverlay,OperationState.ERROR,longPollingTimeout);
  }
  catch ( HiveSQLException e) {
    // Expected: compilation of the malformed query may surface as an exception
    // rather than an ERROR operation state.
  }
  // Runtime failure (invalid location scheme) must carry SQL state and error code.
  queryString="CREATE TABLE NON_EXISTING_TAB (ID STRING) location 'invalid://localhost:10000/a/b/c'";
  opStatus=runAsyncAndWait(sessionHandle,queryString,confOverlay,OperationState.ERROR,longPollingTimeout);
  // BUG FIX: assertEquals takes (expected, actual); the original had the
  // arguments swapped, which produces misleading failure messages.
  assertEquals("08S01",opStatus.getOperationException().getSQLState());
  assertEquals(1,opStatus.getOperationException().getErrorCode());
  // Well-formed query with the default long polling timeout.
  queryString="SELECT ID+1 FROM " + tableName;
  runAsyncAndWait(sessionHandle,queryString,confOverlay,OperationState.FINISHED,longPollingTimeout);
  // Same query with long polling disabled (0 ms).
  longPollingTimeout=0;
  queryString="SELECT ID+1 FROM " + tableName;
  runAsyncAndWait(sessionHandle,queryString,confOverlay,OperationState.FINISHED,longPollingTimeout);
  // Same query with a short (500 ms) long polling timeout.
  longPollingTimeout=500;
  queryString="SELECT ID+1 FROM " + tableName;
  runAsyncAndWait(sessionHandle,queryString,confOverlay,OperationState.FINISHED,longPollingTimeout);
  // Cancellation: submit async, cancel immediately, and verify the state.
  queryString="SELECT ID+1 FROM " + tableName;
  opHandle=client.executeStatementAsync(sessionHandle,queryString,confOverlay);
  System.out.println("Cancelling " + opHandle);
  client.cancelOperation(opHandle);
  state=client.getOperationStatus(opHandle).getState();
  System.out.println(opHandle + " after cancelling, state= " + state);
  assertEquals("Query should be cancelled",OperationState.CANCELED,state);
  // Clean up.
  queryString="DROP TABLE " + tableName;
  client.executeStatement(sessionHandle,queryString,confOverlay);
  client.closeSession(sessionHandle);
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Verifies the result-set metadata of getFunctions(): the six JDBC-style
 * columns must appear in order with the expected names and types.
 * Replaces the original's six copy-pasted assertion pairs with one
 * data-driven loop (identical assertions, positional order preserved).
 */
@Test public void testGetFunctions() throws Exception {
  SessionHandle sessionHandle=client.openSession("tom","password");
  assertNotNull(sessionHandle);
  OperationHandle opHandle=client.getFunctions(sessionHandle,null,null,"*");
  TableSchema schema=client.getResultSetMetadata(opHandle);
  // Expected columns of the getFunctions() result set, in positional order.
  String[] expectedNames={"FUNCTION_CAT","FUNCTION_SCHEM","FUNCTION_NAME","REMARKS","FUNCTION_TYPE","SPECIFIC_NAME"};
  Type[] expectedTypes={Type.STRING_TYPE,Type.STRING_TYPE,Type.STRING_TYPE,Type.STRING_TYPE,Type.INT_TYPE,Type.STRING_TYPE};
  for (int i=0; i < expectedNames.length; i++) {
    ColumnDescriptor columnDesc=schema.getColumnDescriptorAt(i);
    assertEquals(expectedNames[i],columnDesc.getName());
    assertEquals(expectedTypes[i],columnDesc.getType());
  }
  client.closeOperation(opHandle);
  client.closeSession(sessionHandle);
}
UtilityVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Test per statement configuration overlay.
 * Create a table using hiveconf: var substitution, with the conf var passed
 * via confOverlay. Verify the confOverlay works for the query and that the
 * variable is NOT visible to later statements executed without the overlay.
 * @throws Exception
 */
@Test public void testConfOverlay() throws Exception {
SessionHandle sessionHandle=client.openSession("tom","password",new HashMap());
assertNotNull(sessionHandle);
String tabName="TEST_CONF_EXEC";
String tabNameVar="tabNameVar";
// Disable the concurrency/lock manager for this session.
String setLockMgr="SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname + " = false";
OperationHandle opHandle=client.executeStatement(sessionHandle,setLockMgr,null);
client.closeOperation(opHandle);
// Start from a clean slate in case a previous run left the table behind.
String dropTable="DROP TABLE IF EXISTS " + tabName;
opHandle=client.executeStatement(sessionHandle,dropTable,null);
client.closeOperation(opHandle);
// The overlay supplies the value for ${hiveconf:tabNameVar} used below.
Map confOverlay=new HashMap();
confOverlay.put(tabNameVar,tabName);
String createTab="CREATE TABLE ${hiveconf:" + tabNameVar + "} (id int)";
opHandle=client.executeStatement(sessionHandle,createTab,confOverlay);
assertNotNull(opHandle);
assertEquals("Query should be finished",OperationState.FINISHED,client.getOperationStatus(opHandle).getState());
client.closeOperation(opHandle);
// The table must exist under its substituted name.
String selectTab="SELECT * FROM " + tabName;
opHandle=client.executeStatement(sessionHandle,selectTab,null);
assertNotNull(opHandle);
assertEquals("Query should be finished",OperationState.FINISHED,client.getOperationStatus(opHandle).getState());
client.closeOperation(opHandle);
// Without the overlay the hiveconf variable is undefined, so this must fail.
selectTab="SELECT * FROM ${hiveconf:" + tabNameVar + "}";
try {
opHandle=client.executeStatement(sessionHandle,selectTab,null);
fail("Query should fail");
}
catch ( HiveSQLException e) {
// Expected: the variable is only visible to statements that pass the overlay.
}
// Clean up.
dropTable="DROP TABLE IF EXISTS " + tabName;
opHandle=client.executeStatement(sessionHandle,dropTable,null);
client.closeOperation(opHandle);
client.closeSession(sessionHandle);
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Test the blocking execution of a query
 * @throws Exception
 */
@Test public void testExecuteStatement() throws Exception {
HashMap confOverlay=new HashMap();
SessionHandle sessionHandle=client.openSession("tom","password",new HashMap());
assertNotNull(sessionHandle);
OperationHandle opHandle;
// Disable the concurrency/lock manager for this session.
String queryString="SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname + " = false";
opHandle=client.executeStatement(sessionHandle,queryString,confOverlay);
client.closeOperation(opHandle);
// Fresh table for the test; drop any leftover from a previous run.
queryString="DROP TABLE IF EXISTS TEST_EXEC";
opHandle=client.executeStatement(sessionHandle,queryString,confOverlay);
client.closeOperation(opHandle);
queryString="CREATE TABLE TEST_EXEC(ID STRING)";
opHandle=client.executeStatement(sessionHandle,queryString,confOverlay);
client.closeOperation(opHandle);
// Blocking execution: by the time executeStatement returns, the operation
// must already be in the FINISHED state.
queryString="SELECT ID+1 FROM TEST_EXEC";
opHandle=client.executeStatement(sessionHandle,queryString,confOverlay);
assertEquals("Query should be finished",OperationState.FINISHED,client.getOperationStatus(opHandle).getState());
client.closeOperation(opHandle);
// Clean up.
queryString="DROP TABLE IF EXISTS TEST_EXEC";
opHandle=client.executeStatement(sessionHandle,queryString,confOverlay);
client.closeOperation(opHandle);
client.closeSession(sessionHandle);
}
Class: org.apache.hive.service.cli.TestColumn InternalCallVerifier EqualityVerifier
/** Round-trips true and false through a BOOLEAN-typed Column, reading them back in order. */
@Test public void testBooleanValues(){
  Column column=new Column(Type.BOOLEAN_TYPE);
  for ( boolean b : new boolean[]{true,false}) {
    column.addValue(Type.BOOLEAN_TYPE,b);
  }
  assertEquals(Type.BOOLEAN_TYPE,column.getType());
  assertEquals(2,column.size());
  assertEquals(true,column.get(0));
  assertEquals(false,column.get(1));
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * For every integral column type (TINYINT..BIGINT), inserts the type's extreme
 * values and verifies the column reports the type, the count, and each value
 * back in insertion order.
 */
@Test public void testAllIntegerTypeValues(){
  // LinkedHashMap keeps iteration deterministic across runs.
  Map integerTypesAndValues=new LinkedHashMap();
  integerTypesAndValues.put(Type.TINYINT_TYPE,Arrays.asList(Byte.MIN_VALUE,Byte.MAX_VALUE));
  // BUG FIX: the original listed Short.MIN_VALUE twice, so the SMALLINT upper
  // bound was never exercised.
  integerTypesAndValues.put(Type.SMALLINT_TYPE,Arrays.asList(Short.MIN_VALUE,Short.MAX_VALUE));
  integerTypesAndValues.put(Type.INT_TYPE,Arrays.asList(Integer.MIN_VALUE,Integer.MAX_VALUE));
  integerTypesAndValues.put(Type.BIGINT_TYPE,Arrays.asList(Long.MIN_VALUE,Long.MAX_VALUE));
  for ( Map.Entry entry : integerTypesAndValues.entrySet()) {
    Type type=(Type)entry.getKey();
    List values=(List)entry.getValue();
    Column c=new Column(type);
    for ( Object v : values) {
      c.addValue(type,v);
    }
    assertEquals(type,c.getType());
    assertEquals(values.size(),c.size());
    // Values must come back unchanged and in insertion order.
    for (int i=0; i < c.size(); i++) {
      assertEquals(values.get(i),c.get(i));
    }
  }
}
InternalCallVerifier EqualityVerifier
/** Verifies that a STRING-typed Column stores and returns values in insertion order. */
@Test public void testStringValues(){
  String first="12abc456";
  String second="~special$&string";
  Column column=new Column(Type.STRING_TYPE);
  column.addValue(Type.STRING_TYPE,first);
  column.addValue(Type.STRING_TYPE,second);
  assertEquals(Type.STRING_TYPE,column.getType());
  assertEquals(2,column.size());
  assertEquals(first,column.get(0));
  assertEquals(second,column.get(1));
}
InternalCallVerifier EqualityVerifier
/**
 * A column fed FLOAT values reports DOUBLE_TYPE (per the assertion below the
 * float input is widened), while a DOUBLE column keeps its type; both return
 * their values in insertion order.
 */
@Test public void testFloatAndDoubleValues(){
  Column fromFloats=new Column(Type.FLOAT_TYPE);
  fromFloats.addValue(Type.FLOAT_TYPE,1.1f);
  fromFloats.addValue(Type.FLOAT_TYPE,2.033f);
  // Note: type reported as DOUBLE even though the column was declared FLOAT.
  assertEquals(Type.DOUBLE_TYPE,fromFloats.getType());
  assertEquals(2,fromFloats.size());
  assertEquals(1.1,fromFloats.get(0));
  assertEquals(2.033,fromFloats.get(1));
  Column fromDoubles=new Column(Type.DOUBLE_TYPE);
  fromDoubles.addValue(Type.DOUBLE_TYPE,1.1);
  fromDoubles.addValue(Type.DOUBLE_TYPE,2.033);
  assertEquals(Type.DOUBLE_TYPE,fromDoubles.getType());
  assertEquals(2,fromDoubles.size());
  assertEquals(1.1,fromDoubles.get(0));
  assertEquals(2.033,fromDoubles.get(1));
}
InternalCallVerifier EqualityVerifier
/** A BINARY-typed Column stores raw byte arrays and returns their content intact. */
@Test public void testBinaryValues(){
  Column column=new Column(Type.BINARY_TYPE);
  column.addValue(Type.BINARY_TYPE,new byte[]{-1,0,3,4});
  assertEquals(Type.BINARY_TYPE,column.getType());
  assertEquals(1,column.size());
  // Compare content against a fresh array so aliasing cannot mask a bug.
  assertArrayEquals(new byte[]{-1,0,3,4},(byte[])column.get(0));
}
Class: org.apache.hive.service.cli.TestHiveSQLException APIUtilityVerifier IdentityVerifier EqualityVerifier HybridVerifier
/**
 * Serializes an exception (with a simple cause) to its text form and parses it
 * back, checking that class, message, and the cause link survive the round trip.
 */
@Test public void testExceptionMarshalling() throws Exception {
  Exception original=createException();
  original.initCause(createSimpleCause());
  // Round-trip: Throwable -> marshalled text -> Throwable.
  Throwable restored=HiveSQLException.toCause(HiveSQLException.toString(original));
  Assert.assertSame(RuntimeException.class,restored.getClass());
  Assert.assertEquals("exception1",restored.getMessage());
  Assert.assertSame(UnsupportedOperationException.class,restored.getCause().getClass());
  Assert.assertEquals("exception2",restored.getCause().getMessage());
}
APIUtilityVerifier IdentityVerifier EqualityVerifier HybridVerifier
/**
 * Round-trips an exception whose cause chain is two levels deep and checks
 * that every link (class and message) is reconstructed.
 */
@Test public void testNestedException(){
  Exception original=createException();
  original.initCause(createNestedCause());
  Throwable restored=HiveSQLException.toCause(HiveSQLException.toString(original));
  // Level 0: the top-level exception.
  Assert.assertSame(RuntimeException.class,restored.getClass());
  Assert.assertEquals("exception1",restored.getMessage());
  // Level 1: the direct cause.
  Throwable cause=restored.getCause();
  Assert.assertSame(UnsupportedOperationException.class,cause.getClass());
  Assert.assertEquals("exception2",cause.getMessage());
  // Level 2: the nested cause.
  Assert.assertSame(Exception.class,cause.getCause().getClass());
  Assert.assertEquals("exception3",cause.getCause().getMessage());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Converts a HiveSQLException into a Thrift TStatus and checks that status
 * code, SQL state, message, and marshalled stack trace all carry over.
 */
@Test public void testHiveSQLExceptionToTStatus(){
  final String reason="reason";
  final String sqlState="sqlState";
  final int vendorCode=10;
  Exception source=new HiveSQLException(reason,sqlState,vendorCode,createSimpleCause());
  TStatus status=HiveSQLException.toTStatus(source);
  Assert.assertEquals(TStatusCode.ERROR_STATUS,status.getStatusCode());
  Assert.assertEquals(sqlState,status.getSqlState());
  Assert.assertEquals(reason,status.getErrorMessage());
  // The info messages embed the full marshalled exception chain.
  Assert.assertEquals(HiveSQLException.toString(source),status.getInfoMessages());
}
APIUtilityVerifier IdentityVerifier EqualityVerifier HybridVerifier
/**
 * Tests the conversion of the exception from anonymous class
 * (same round-trip as testExceptionMarshalling, but performed from inside the
 * body of an anonymous Dummy implementation).
 */
@Test public void testExceptionFromAnonymousClass(){
Dummy d=new Dummy(){
public void testExceptionConversion(){
Exception ex1=createException();
ex1.initCause(createSimpleCause());
// Round-trip: Throwable -> marshalled text -> Throwable.
Throwable ex=HiveSQLException.toCause(HiveSQLException.toString(ex1));
Assert.assertSame(RuntimeException.class,ex.getClass());
Assert.assertEquals("exception1",ex.getMessage());
Assert.assertSame(UnsupportedOperationException.class,ex.getCause().getClass());
Assert.assertEquals("exception2",ex.getCause().getMessage());
}
}
;
d.testExceptionConversion();
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Converts a plain (non-HiveSQL) exception to a TStatus and checks the mapped
 * status code, message, and marshalled stack trace.
 */
@Test public void testExceptionToTStatus(){
  Exception source=createException();
  source.initCause(createSimpleCause());
  TStatus status=HiveSQLException.toTStatus(source);
  Assert.assertEquals(TStatusCode.ERROR_STATUS,status.getStatusCode());
  Assert.assertEquals(source.getMessage(),status.getErrorMessage());
  // Info messages carry the marshalled trace of the whole cause chain.
  Assert.assertEquals(HiveSQLException.toString(source),status.getInfoMessages());
}
APIUtilityVerifier EqualityVerifier
/**
 * Tests the conversion of the exception that the class type of one of the causes
 * doesn't exist. The stack trace text is generated on the server and passed to JDBC
 * client. It's possible that some cause types don't exist on the client and HiveSQLException
 * can't convert them and use RunTimeException instead.
 */
@Test public void testExceptionWithMissingTypeOnClient(){
  Exception ex1=new UnsupportedOperationException();
  ex1.initCause(createSimpleCause());
  List details=HiveSQLException.toString(ex1);
  // Rewrite the head line so the exception class cannot be resolved client-side.
  String[] tokens=details.get(0).split(":");
  tokens[0]="*DummyException";
  details.set(0,StringUtils.join(tokens,":"));
  Throwable ex=HiveSQLException.toCause(details);
  // CONSISTENCY FIX: sibling tests in this class use assertSame for class
  // identity; assertEquals on Class objects is equivalent but less idiomatic.
  Assert.assertSame(RuntimeException.class,ex.getClass());
}
APIUtilityVerifier IdentityVerifier EqualityVerifier HybridVerifier
/**
 * Simulates a stack-trace frame whose source file and line number are unknown
 * and checks that parsing still reconstructs the full exception chain.
 */
@Test public void testExceptionWithUnknownSource(){
  Exception source=createException();
  source.initCause(createSimpleCause());
  List details=HiveSQLException.toString(source);
  // Blank out the file name and line number of the first stack frame.
  String[] frame=details.get(1).split(":");
  frame[2]=null;
  frame[3]="-1";
  details.set(1,StringUtils.join(frame,":"));
  Throwable restored=HiveSQLException.toCause(details);
  Assert.assertSame(RuntimeException.class,restored.getClass());
  Assert.assertEquals("exception1",restored.getMessage());
  Assert.assertSame(UnsupportedOperationException.class,restored.getCause().getClass());
  Assert.assertEquals("exception2",restored.getCause().getMessage());
}
Class: org.apache.hive.service.cli.TestRetryingThriftCLIServiceClient UtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Exercises the retrying CLI service client: connecting to a port nobody
 * listens on must fail after the configured connect retries, and a call made
 * after the server is stopped must exhaust call/connect retries.
 */
@Test public void testRetryBehaviour() throws Exception {
  HiveConf hiveConf=new HiveConf();
  hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST,"localhost");
  hiveConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT,15000);
  hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS,false);
  hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION,HiveAuthFactory.AuthTypes.NONE.toString());
  hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE,"binary");
  // Both retry limits are 3; the assertions below depend on these values.
  hiveConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_RETRY_LIMIT,3);
  hiveConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_CONNECTION_RETRY_LIMIT,3);
  hiveConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_ASYNC_EXEC_THREADS,10);
  hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT,"1s");
  final HiveServer2 server=new HiveServer2();
  server.init(hiveConf);
  server.start();
  Thread.sleep(5000);
  System.out.println("## HiveServer started");
  // Point the client at a port with no listener: creation must fail after retries.
  hiveConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT,17000);
  try {
    CLIServiceClient cliServiceClient=RetryingThriftCLIServiceClientTest.newRetryingCLIServiceClient(hiveConf);
    fail("Expected to throw exception for invalid port");
  }
  catch ( HiveSQLException sqlExc) {
    assertTrue(sqlExc.getCause() instanceof TTransportException);
    // The message mentions the retry count (limit of 3 configured above).
    assertTrue(sqlExc.getMessage().contains("3"));
  }
  // Reconnect on the real port, then stop the server to provoke call failures.
  hiveConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT,15000);
  CLIServiceClient cliServiceClient=RetryingThriftCLIServiceClientTest.newRetryingCLIServiceClient(hiveConf);
  System.out.println("## Created client");
  server.stop();
  Thread.sleep(5000);
  try {
    Map confOverlay=new HashMap();
    RetryingThriftCLIServiceClientTest.handlerInst.callCount=0;
    RetryingThriftCLIServiceClientTest.handlerInst.connectCount=0;
    SessionHandle session=cliServiceClient.openSession("anonymous","anonymous");
    // BUG FIX: without this fail() the test passed vacuously whenever
    // openSession unexpectedly succeeded against the stopped server.
    fail("Expected openSession to fail: the server has been stopped");
  }
  catch ( HiveSQLException exc) {
    exc.printStackTrace();
    assertTrue(exc.getCause() instanceof TException);
    // One failed call followed by three failed reconnect attempts.
    assertEquals(1,RetryingThriftCLIServiceClientTest.handlerInst.callCount);
    assertEquals(3,RetryingThriftCLIServiceClientTest.handlerInst.connectCount);
  }
}
Class: org.apache.hive.service.cli.operation.TestOperationLoggingAPIWithMr APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Executes a query asynchronously and, while polling for completion, drains
 * the operation log (FetchType.LOG); afterwards rewinds with FETCH_FIRST and
 * verifies both the accumulated and the re-fetched log output.
 */
@Test public void testFetchResultsOfLogAsync() throws Exception {
OperationHandle operationHandle=client.executeStatementAsync(sessionHandle,sql,null);
boolean isQueryRunning=true;
// Hard cap on the polling loop: give up after roughly 100 seconds.
long pollTimeout=System.currentTimeMillis() + 100000;
OperationStatus opStatus;
OperationState state=null;
RowSet rowSetAccumulated=null;
StringBuilder logs=new StringBuilder();
while (isQueryRunning) {
if (System.currentTimeMillis() > pollTimeout) {
break;
}
opStatus=client.getOperationStatus(operationHandle);
Assert.assertNotNull(opStatus);
state=opStatus.getState();
// Drain whatever log rows are currently available (column 0 holds the text).
rowSetAccumulated=client.fetchResults(operationHandle,FetchOrientation.FETCH_NEXT,2000,FetchType.LOG);
for ( Object[] row : rowSetAccumulated) {
logs.append(row[0]);
}
// Any terminal state ends the polling loop.
if (state == OperationState.CANCELED || state == OperationState.CLOSED || state == OperationState.FINISHED || state == OperationState.ERROR) {
isQueryRunning=false;
}
Thread.sleep(10);
}
Assert.assertEquals("Query should be finished",OperationState.FINISHED,state);
verifyFetchedLogPost(logs.toString(),expectedLogsVerbose,true);
// FETCH_FIRST rewinds the log so the complete output can be read again.
RowSet rowSet=client.fetchResults(operationHandle,FetchOrientation.FETCH_FIRST,2000,FetchType.LOG);
verifyFetchedLog(rowSet,expectedLogsVerbose);
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Verifies that fetching the operation log in small FETCH_NEXT chunks yields
 * exactly as many rows as a single large FETCH_FIRST fetch of the same query.
 */
@Test public void testFetchResultsOfLogWithOrientation() throws Exception {
// Baseline: run the query once and read the whole log in a single fetch.
OperationHandle operationHandle=client.executeStatement(sessionHandle,sql,null);
RowSet rowSetLog=client.fetchResults(operationHandle,FetchOrientation.FETCH_FIRST,1000,FetchType.LOG);
int expectedLogLength=rowSetLog.numRows();
// Re-run the same query and read its log incrementally via FETCH_NEXT.
OperationHandle operationHandleWithOrientation=client.executeStatement(sessionHandle,sql,null);
RowSet rowSetLogWithOrientation;
int logLength=0;
// Chunk size derived from the expected length by the helper; presumably chosen
// so the final fetch comes back short of maxRows — see calculateProperMaxRows.
int maxRows=calculateProperMaxRows(expectedLogLength);
do {
rowSetLogWithOrientation=client.fetchResults(operationHandleWithOrientation,FetchOrientation.FETCH_NEXT,maxRows,FetchType.LOG);
logLength+=rowSetLogWithOrientation.numRows();
}
while (rowSetLogWithOrientation.numRows() == maxRows);
Assert.assertEquals(expectedLogLength,logLength);
// FETCH_FIRST must rewind and return the full log again.
rowSetLogWithOrientation=client.fetchResults(operationHandleWithOrientation,FetchOrientation.FETCH_FIRST,1000,FetchType.LOG);
verifyFetchedLog(rowSetLogWithOrientation,expectedLogsVerbose);
}
Class: org.apache.hive.service.cli.operation.TestOperationLoggingLayout APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Checks that the operation-log layout changes with the logging level: the
 * default layout starts each line with a bare log-level token, while the
 * "verbose" level prefixes lines with a date stamp.
 */
@Test public void testSwitchLogLayout() throws Exception {
OperationHandle operationHandle=client.executeStatement(sessionHandle,sqlCntStar,null);
RowSet rowSetLog=client.fetchResults(operationHandle,FetchOrientation.FETCH_FIRST,1000,FetchType.LOG);
Iterator iter=rowSetLog.iterator();
while (iter.hasNext()) {
String row=iter.next()[0].toString();
// Default layout: every line begins with the log level.
Assert.assertEquals(true,row.matches("^(FATAL|ERROR|WARN|INFO|DEBUG|TRACE).*$"));
}
// Switch the operation logging level for this session and rerun the query.
String queryString="set hive.server2.logging.operation.level=verbose";
client.executeStatement(sessionHandle,queryString,null);
operationHandle=client.executeStatement(sessionHandle,sqlCntStar,null);
rowSetLog=client.fetchResults(operationHandle,FetchOrientation.FETCH_FIRST,10,FetchType.LOG);
iter=rowSetLog.iterator();
while (iter.hasNext()) {
String row=iter.next()[0].toString();
// Verbose layout: lines begin with a yy/MM/dd-style date.
Assert.assertEquals(true,row.matches("^\\d{2}[/](0[1-9]|1[012])[/](0[1-9]|[12][0-9]|3[01]).*$"));
}
}
Class: org.apache.hive.service.cli.session.TestPluggableHiveSessionImpl BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Verifies that the server honors the pluggable session implementation: the
 * configured class name matches and the live session object is an instance of
 * the custom TestHiveSessionImpl.
 */
@Test public void testSessionImpl(){
  SessionHandle sessionHandle=null;
  try {
    sessionHandle=client.openSession("tom","password");
    Assert.assertEquals(TestHiveSessionImpl.class.getName(),service.getHiveConf().getVar(HiveConf.ConfVars.HIVE_SESSION_IMPL_CLASSNAME));
    Assert.assertTrue(cliService.getSessionManager().getSession(sessionHandle) instanceof TestHiveSessionImpl);
    client.closeSession(sessionHandle);
  }
  catch ( HiveSQLException e) {
    // BUG FIX: the original only printed the stack trace, so any failure in
    // the try block let the test pass vacuously.
    e.printStackTrace();
    Assert.fail("Unexpected HiveSQLException: " + e.getMessage());
  }
}
Class: org.apache.hive.service.cli.session.TestSessionHooks EqualityVerifier
/**
 * Create session with proxy user property. Verify the effective session user
 * @throws Exception
 */
@Test public void testProxyUser() throws Exception {
  // Connect as "user1" but proxy as the current OS user; the hook checks the
  // effective (proxied) user via sessionUserName.
  String connectingUser="user1";
  String effectiveUser=System.getProperty("user.name");
  Map sessionConf=new HashMap();
  sessionConf.put(HiveAuthFactory.HS2_PROXY_USER,effectiveUser);
  sessionUserName=effectiveUser;
  SessionHandle handle=client.openSession(connectingUser,"foobar",sessionConf);
  // The session hook must have run exactly once for this open.
  Assert.assertEquals(1,SessionHookTest.runCount.get());
  client.closeSession(handle);
}
EqualityVerifier
/** Opening a session must trigger the configured session hook exactly once. */
@Test public void testSessionHook() throws Exception {
  SessionHandle handle=client.openSession(sessionUserName,"foobar",Collections.emptyMap());
  Assert.assertEquals(1,SessionHookTest.runCount.get());
  client.closeSession(handle);
}
Class: org.apache.hive.service.cli.thrift.ThriftCLIServiceTest InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Test synchronous query execution
 * @throws Exception
 */
@Test public void testExecuteStatement() throws Exception {
Map opConf=new HashMap();
SessionHandle sessHandle=client.openSession(USERNAME,PASSWORD,opConf);
assertNotNull("Session handle should not be null",sessHandle);
// Use the embedded lock manager so no external coordination service is needed.
String queryString="SET hive.lock.manager=" + "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager";
client.executeStatement(sessHandle,queryString,opConf);
// Fresh table for the test; drop any leftover from a previous run.
queryString="DROP TABLE IF EXISTS TEST_EXEC_THRIFT";
client.executeStatement(sessHandle,queryString,opConf);
queryString="CREATE TABLE TEST_EXEC_THRIFT(ID STRING)";
client.executeStatement(sessHandle,queryString,opConf);
// Synchronous execution: the operation must be FINISHED once the call returns.
queryString="SELECT ID+1 FROM TEST_EXEC_THRIFT";
OperationHandle opHandle=client.executeStatement(sessHandle,queryString,opConf);
assertNotNull(opHandle);
OperationStatus opStatus=client.getOperationStatus(opHandle);
assertNotNull(opStatus);
OperationState state=opStatus.getState();
assertEquals("Query should be finished",OperationState.FINISHED,state);
// Clean up.
queryString="DROP TABLE TEST_EXEC_THRIFT";
client.executeStatement(sessHandle,queryString,opConf);
client.closeSession(sessHandle);
}
IterativeVerifier InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Test asynchronous query execution and error reporting to the client
 * @throws Exception
 */
@Test public void testExecuteStatementAsync() throws Exception {
  Map opConf=new HashMap();
  SessionHandle sessHandle=client.openSession(USERNAME,PASSWORD,opConf);
  assertNotNull("Session handle should not be null",sessHandle);
  OperationHandle opHandle;
  OperationStatus opStatus;
  OperationState state=null;
  // Use the embedded lock manager so no external coordination service is needed.
  String queryString="SET hive.lock.manager=" + "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager";
  client.executeStatement(sessHandle,queryString,opConf);
  queryString="DROP TABLE IF EXISTS TEST_EXEC_ASYNC_THRIFT";
  client.executeStatement(sessHandle,queryString,opConf);
  queryString="CREATE TABLE TEST_EXEC_ASYNC_THRIFT(ID STRING)";
  client.executeStatement(sessHandle,queryString,opConf);
  // Async query expected to succeed: poll until a terminal state or timeout.
  queryString="SELECT ID+1 FROM TEST_EXEC_ASYNC_THRIFT";
  System.out.println("Will attempt to execute: " + queryString);
  opHandle=client.executeStatementAsync(sessHandle,queryString,opConf);
  assertNotNull(opHandle);
  boolean isQueryRunning=true;
  long pollTimeout=System.currentTimeMillis() + 100000;
  while (isQueryRunning) {
    if (System.currentTimeMillis() > pollTimeout) {
      System.out.println("Polling timed out");
      break;
    }
    opStatus=client.getOperationStatus(opHandle);
    assertNotNull(opStatus);
    state=opStatus.getState();
    System.out.println("Current state: " + state);
    if (state == OperationState.CANCELED || state == OperationState.CLOSED || state == OperationState.FINISHED || state == OperationState.ERROR) {
      isQueryRunning=false;
    }
    Thread.sleep(1000);
  }
  assertEquals("Query should be finished",OperationState.FINISHED,state);
  // Async query expected to fail at runtime (unreachable HDFS location).
  queryString="CREATE TABLE NON_EXISTING_TAB (ID STRING) location 'hdfs://localhost:10000/a/b/c'";
  System.out.println("Will attempt to execute: " + queryString);
  opHandle=client.executeStatementAsync(sessHandle,queryString,opConf);
  assertNotNull(opHandle);
  opStatus=client.getOperationStatus(opHandle);
  assertNotNull(opStatus);
  isQueryRunning=true;
  pollTimeout=System.currentTimeMillis() + 100000;
  while (isQueryRunning) {
    if (System.currentTimeMillis() > pollTimeout) {
      System.out.println("Polling timed out");
      break;
    }
    state=opStatus.getState();
    System.out.println("Current state: " + state);
    if (state == OperationState.CANCELED || state == OperationState.CLOSED || state == OperationState.FINISHED || state == OperationState.ERROR) {
      isQueryRunning=false;
    }
    Thread.sleep(1000);
    opStatus=client.getOperationStatus(opHandle);
  }
  assertEquals("Operation should be in error state",OperationState.ERROR,state);
  // BUG FIX: assertEquals takes (expected, actual); the original had the
  // arguments swapped, which produces misleading failure messages.
  assertEquals("08S01",opStatus.getOperationException().getSQLState());
  assertEquals(1,opStatus.getOperationException().getErrorCode());
  // Clean up.
  queryString="DROP TABLE TEST_EXEC_ASYNC_THRIFT";
  client.executeStatement(sessHandle,queryString,opConf);
  client.closeSession(sessHandle);
}
Class: org.apache.hive.service.cli.thrift.ThriftCliServiceTestWithCookie InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Test synchronous query execution
 * (cookie-enabled Thrift transport variant).
 * @throws Exception
 */
@Test public void testExecuteStatement() throws Exception {
Map opConf=new HashMap();
SessionHandle sessHandle=client.openSession(USERNAME,PASSWORD,opConf);
assertNotNull("Session handle should not be null",sessHandle);
// Use the embedded lock manager so no external coordination service is needed.
String queryString="SET hive.lock.manager=" + "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager";
client.executeStatement(sessHandle,queryString,opConf);
// Fresh table for the test; drop any leftover from a previous run.
queryString="DROP TABLE IF EXISTS TEST_EXEC_THRIFT";
client.executeStatement(sessHandle,queryString,opConf);
queryString="CREATE TABLE TEST_EXEC_THRIFT(ID STRING)";
client.executeStatement(sessHandle,queryString,opConf);
// Synchronous execution: the operation must be FINISHED once the call returns.
queryString="SELECT ID+1 FROM TEST_EXEC_THRIFT";
OperationHandle opHandle=client.executeStatement(sessHandle,queryString,opConf);
assertNotNull(opHandle);
OperationStatus opStatus=client.getOperationStatus(opHandle);
assertNotNull(opStatus);
OperationState state=opStatus.getState();
assertEquals("Query should be finished",OperationState.FINISHED,state);
// Clean up.
queryString="DROP TABLE TEST_EXEC_THRIFT";
client.executeStatement(sessHandle,queryString,opConf);
client.closeSession(sessHandle);
}
Class: org.apache.hive.service.server.TestHS2HttpServer APIUtilityVerifier BooleanVerifier EqualityVerifier HybridVerifier
/**
 * Hits the HS2 web UI /stacks endpoint and verifies it returns HTTP 200 and a
 * body containing the thread-dump marker.
 */
@Test public void testStackServket() throws Exception {
  String baseURL="http://localhost:" + ConfVars.HIVE_SERVER2_WEBUI_PORT.getDefaultValue() + "/stacks";
  URL url=new URL(baseURL);
  HttpURLConnection conn=(HttpURLConnection)url.openConnection();
  Assert.assertEquals(HttpURLConnection.HTTP_OK,conn.getResponseCode());
  BufferedReader reader=new BufferedReader(new InputStreamReader(conn.getInputStream()));
  boolean contents=false;
  // BUG FIX: the original leaked the reader and the connection; release both
  // even if reading throws.
  try {
    String line;
    while ((line=reader.readLine()) != null) {
      if (line.contains("Process Thread Dump:")) {
        contents=true;
      }
    }
  }
  finally {
    reader.close();
    conn.disconnect();
  }
  Assert.assertTrue(contents);
}
Class: org.apache.hive.service.server.TestServerOptionsProcessor EqualityVerifier
/**
 * Parsing "-hiveconf key=value" must publish the value as a JVM system
 * property. The property is cleared afterwards so it cannot leak into other
 * tests running in the same JVM.
 */
@Test public void test(){
  ServerOptionsProcessor optProcessor=new ServerOptionsProcessor("HiveServer2");
  final String key="testkey";
  final String value="value123";
  String[] args={"-hiveconf",key + "=" + value};
  // IDIOM: assertNull instead of assertEquals(..., null, ...).
  Assert.assertNull("checking system property before processing options",System.getProperty(key));
  try {
    optProcessor.parse(args);
    Assert.assertEquals("checking system property after processing options",value,System.getProperty(key));
  }
  finally {
    // ROBUSTNESS FIX: the original left the property set for the rest of the JVM.
    System.clearProperty(key);
  }
}
Class: org.apache.hive.spark.client.TestMetricsCollection InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Two tasks of the same stage read their input via different methods
 * (Memory vs Disk); the aggregated metrics must report readMethod=Multiple.
 */
@Test public void testInputReadMethodAggregation(){
  MetricsCollection collection=new MetricsCollection();
  long v=taskValue(1,1,1);
  Metrics memoryRead=new Metrics(v,v,v,v,v,v,v,new InputMetrics(DataReadMethod.Memory,v),null,null);
  Metrics diskRead=new Metrics(v,v,v,v,v,v,v,new InputMetrics(DataReadMethod.Disk,v),null,null);
  collection.addMetrics(1,1,1,memoryRead);
  collection.addMetrics(1,1,2,diskRead);
  Metrics aggregated=collection.getAllMetrics();
  assertNotNull(aggregated.inputMetrics);
  assertEquals(DataReadMethod.Multiple,aggregated.inputMetrics.readMethod);
}
InternalCallVerifier EqualityVerifier
/**
 * Populates metrics for every (job, stage, task) combination of a 2x2x2 grid
 * and verifies aggregation at the task, stage, job, and global levels.
 */
@Test public void testMetricsAggregation(){
MetricsCollection collection=new MetricsCollection();
// makeMetrics(i,j,k) derives its values from the coordinates, so each level's
// expected aggregate can be computed by the *Value helpers below.
for ( int i : Arrays.asList(1,2)) {
for ( int j : Arrays.asList(1,2)) {
for ( long k : Arrays.asList(1L,2L)) {
collection.addMetrics(i,j,k,makeMetrics(i,j,k));
}
}
}
// The collection indexes by job, stage-within-job, and task-within-stage.
assertEquals(ImmutableSet.of(1,2),collection.getJobIds());
assertEquals(ImmutableSet.of(1,2),collection.getStageIds(1));
assertEquals(ImmutableSet.of(1L,2L),collection.getTaskIds(1,1));
// Single task: exact values from makeMetrics.
Metrics task112=collection.getTaskMetrics(1,1,2);
checkMetrics(task112,taskValue(1,1,2));
// Stage, job, and global levels fold 2, 4, and 8 tasks respectively.
Metrics stage21=collection.getStageMetrics(2,1);
checkMetrics(stage21,stageValue(2,1,2));
Metrics job1=collection.getJobMetrics(1);
checkMetrics(job1,jobValue(1,2,2));
Metrics global=collection.getAllMetrics();
checkMetrics(global,globalValue(2,2,2));
}
InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Optional sub-metrics (input/shuffle) stay null in the aggregate until at
 * least one task actually reports them.
 */
@Test public void testOptionalMetrics(){
  long v=taskValue(1,1,1L);
  Metrics bare=new Metrics(v,v,v,v,v,v,v,null,null,null);
  MetricsCollection collection=new MetricsCollection();
  for ( int jobId : Arrays.asList(1,2)) {
    collection.addMetrics(jobId,1,1,bare);
  }
  // No task reported optional metrics, so the aggregate has none either.
  Metrics aggregate=collection.getAllMetrics();
  assertNull(aggregate.inputMetrics);
  assertNull(aggregate.shuffleReadMetrics);
  assertNull(aggregate.shuffleWriteMetrics);
  // One fully-populated task is enough to materialize them in the aggregate.
  collection.addMetrics(3,1,1,makeMetrics(3,1,1));
  Metrics updated=collection.getAllMetrics();
  assertNotNull(updated.inputMetrics);
  assertEquals(taskValue(3,1,1),updated.inputMetrics.bytesRead);
  assertNotNull(updated.shuffleReadMetrics);
  assertNotNull(updated.shuffleWriteMetrics);
}
Class: org.apache.hive.spark.client.TestSparkClient InternalCallVerifier EqualityVerifier NullVerifier HybridVerifier
/**
 * Runs a job that increments Spark counters and verifies the final values
 * visible through the job handle.
 */
@Test public void testCounters() throws Exception {
  runTest(true, new TestFunction(){
    @Override public void call( SparkClient client) throws Exception {
      // FIX: "JobHandle>" was garbled, syntactically invalid generics.
      JobHandle<?> job = client.submit(new CounterIncrementJob());
      job.get(TIMEOUT, TimeUnit.SECONDS);
      SparkCounters counters = job.getSparkCounters();
      assertNotNull(counters);
      long expected = 1 + 2 + 3 + 4 + 5;
      assertEquals(expected, counters.getCounter("group1", "counter1").getValue());
      assertEquals(expected, counters.getCounter("group2", "counter2").getValue());
    }
  });
}
InternalCallVerifier EqualityVerifier
/** Verifies a synchronous RPC call returns the expected reply. */
@Test public void testSyncRpc() throws Exception {
  runTest(true, new TestFunction(){
    @Override public void call(SparkClient client) throws Exception {
      Future reply = client.run(new SyncRpc());
      assertEquals("Hello", reply.get(TIMEOUT, TimeUnit.SECONDS));
    }
  });
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/**
 * Adds a jar and a plain file to a running client and verifies both become
 * visible to remotely-submitted jobs.
 */
@Test public void testAddJarsAndFiles() throws Exception {
  runTest(true, new TestFunction(){
    @Override public void call( SparkClient client) throws Exception {
      File jar = null;
      File file = null;
      try {
        // Build a jar containing a single classpath resource.
        jar = File.createTempFile("test", ".jar");
        JarOutputStream jarFile = new JarOutputStream(new FileOutputStream(jar));
        jarFile.putNextEntry(new ZipEntry("test.resource"));
        jarFile.write("test resource".getBytes("UTF-8"));
        jarFile.closeEntry();
        jarFile.close();
        client.addJar(new URI("file:" + jar.getAbsolutePath())).get(TIMEOUT, TimeUnit.SECONDS);
        String result = client.submit(new JarJob()).get(TIMEOUT, TimeUnit.SECONDS);
        assertEquals("test resource", result);
        // Distribute a plain file and read it back from a job.
        file = File.createTempFile("test", ".file");
        FileOutputStream fileStream = new FileOutputStream(file);
        fileStream.write("test file".getBytes("UTF-8"));
        fileStream.close();
        // BUG FIX: a plain file must be registered via addFile(), not addJar().
        client.addFile(new URI("file:" + file.getAbsolutePath())).get(TIMEOUT, TimeUnit.SECONDS);
        result = client.submit(new FileJob(file.getName())).get(TIMEOUT, TimeUnit.SECONDS);
        assertEquals("test file", result);
      }
      finally {
        if (jar != null) {
          jar.delete();
        }
        if (file != null) {
          file.delete();
        }
      }
    }
  });
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Submits two async jobs and checks that each handle collects its own,
 * distinct set of Spark job metrics and notifies its listener.
 */
@Test public void testMetricsCollection() throws Exception {
  runTest(true, new TestFunction(){
    @Override public void call(SparkClient client) throws Exception {
      JobHandle.Listener firstListener = newListener();
      JobHandle firstJob = client.submit(new AsyncSparkJob());
      firstJob.addListener(firstListener);
      firstJob.get(TIMEOUT, TimeUnit.SECONDS);
      MetricsCollection firstMetrics = firstJob.getMetrics();
      assertEquals(1, firstMetrics.getJobIds().size());
      assertTrue(firstMetrics.getAllMetrics().executorRunTime >= 0L);
      verify(firstListener).onSparkJobStarted(same(firstJob),
          eq(firstMetrics.getJobIds().iterator().next()));
      // A second submission must yield a different Spark job id.
      JobHandle.Listener secondListener = newListener();
      JobHandle secondJob = client.submit(new AsyncSparkJob());
      secondJob.addListener(secondListener);
      secondJob.get(TIMEOUT, TimeUnit.SECONDS);
      MetricsCollection secondMetrics = secondJob.getMetrics();
      assertEquals(1, secondMetrics.getJobIds().size());
      assertFalse(Objects.equal(firstMetrics.getJobIds(), secondMetrics.getJobIds()));
      assertTrue(secondMetrics.getAllMetrics().executorRunTime >= 0L);
      verify(secondListener).onSparkJobStarted(same(secondJob),
          eq(secondMetrics.getJobIds().iterator().next()));
    }
  });
}
InternalCallVerifier EqualityVerifier
/** Runs a trivial Spark job in-process and checks its result. */
@Test public void testSimpleSparkJob() throws Exception {
  runTest(true, new TestFunction(){
    @Override public void call(SparkClient client) throws Exception {
      JobHandle job = client.submit(new SparkJob());
      assertEquals(Long.valueOf(5L), job.get(TIMEOUT, TimeUnit.SECONDS));
    }
  });
}
InternalCallVerifier EqualityVerifier
/** Runs the same trivial Spark job against an out-of-process (remote) client. */
@Test public void testRemoteClient() throws Exception {
  runTest(false, new TestFunction(){
    @Override public void call(SparkClient client) throws Exception {
      JobHandle job = client.submit(new SparkJob());
      assertEquals(Long.valueOf(5L), job.get(TIMEOUT, TimeUnit.SECONDS));
    }
  });
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Submits a job and verifies the listener observes the full lifecycle
 * (queued, started, succeeded) and that a finished handle cannot regress.
 */
@Test public void testJobSubmission() throws Exception {
  runTest(true, new TestFunction(){
    @Override public void call(SparkClient client) throws Exception {
      JobHandle.Listener listener = newListener();
      JobHandle job = client.submit(new SimpleJob());
      job.addListener(listener);
      assertEquals("hello", job.get(TIMEOUT, TimeUnit.SECONDS));
      // A completed handle must reject a transition back to SENT.
      assertFalse(((JobHandleImpl) job).changeState(JobHandle.State.SENT));
      verify(listener).onJobQueued(job);
      verify(listener).onJobStarted(job);
      verify(listener).onJobSucceeded(same(job), eq(job.get()));
    }
  });
}
Class: org.apache.hive.spark.client.rpc.TestKryoMessageCodec EqualityVerifier
/**
 * A partial frame must decode to nothing; once a full frame is present the
 * codec must emit exactly one message.
 */
@Test public void testFragmentation() throws Exception {
  ByteBuf encoded = newBuffer();
  Object[] payloads = {"msg1", "msg2"};
  int[] frameEnds = new int[payloads.length];
  KryoMessageCodec codec = new KryoMessageCodec(0);
  for (int i = 0; i < payloads.length; i++) {
    codec.encode(null, payloads[i], encoded);
    frameEnds[i] = encoded.writerIndex();
  }
  List decoded = Lists.newArrayList();
  // One byte short of the first frame boundary: nothing decodes.
  codec.decode(null, encoded.slice(0, frameEnds[0] - 1), decoded);
  assertEquals(0, decoded.size());
  // One byte past the boundary: exactly the first message decodes.
  codec.decode(null, encoded.slice(0, frameEnds[0] + 1), decoded);
  assertEquals(1, decoded.size());
}
BooleanVerifier EqualityVerifier HybridVerifier
/** Classes passed to the codec constructor are auto-registered with Kryo. */
@Test public void testAutoRegistration() throws Exception {
  KryoMessageCodec codec = new KryoMessageCodec(0, TestMessage.class);
  ByteBuf wire = newBuffer();
  codec.encode(null, new TestMessage(), wire);
  List decoded = Lists.newArrayList();
  codec.decode(null, wire, decoded);
  assertEquals(1, decoded.size());
  assertTrue(decoded.get(0) instanceof TestMessage);
}
APIUtilityVerifier EqualityVerifier
/** A message must survive an encrypt/decrypt round trip intact. */
@Test public void testEncryptDecrypt() throws Exception {
  List decoded = encodeAndDecode(MESSAGE, new TestEncryptionHandler(true, true));
  assertEquals(1, decoded.size());
  assertEquals(MESSAGE, decoded.get(0));
}
APIUtilityVerifier EqualityVerifier
/** A message must survive a plain (unencrypted) codec round trip intact. */
@Test public void testKryoCodec() throws Exception {
  List decoded = encodeAndDecode(MESSAGE, null);
  assertEquals(1, decoded.size());
  assertEquals(MESSAGE, decoded.get(0));
}
BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * Pushes a message through an embedded Netty pipeline: outbound data is the
 * encoded form (not the original object) and decodes back to an equal message.
 */
@Test public void testEmbeddedChannel() throws Exception {
  EmbeddedChannel channel =
      new EmbeddedChannel(new LoggingHandler(getClass()), new KryoMessageCodec(0));
  channel.writeAndFlush(MESSAGE);
  assertEquals(1, channel.outboundMessages().size());
  // What goes over the wire is the serialized form, not the message type itself.
  assertFalse(MESSAGE.getClass().equals(channel.outboundMessages().peek().getClass()));
  channel.writeInbound(channel.readOutbound());
  assertEquals(1, channel.inboundMessages().size());
  assertEquals(MESSAGE, channel.readInbound());
  channel.close();
}
Class: org.apache.hive.spark.client.rpc.TestRpc APIUtilityVerifier BooleanVerifier InternalCallVerifier EqualityVerifier HybridVerifier
/**
 * End-to-end RPC smoke test: two client-to-server calls, error propagation
 * through the future, and a server-to-client call on the same connection.
 */
@Test public void testClientServer() throws Exception {
  RpcServer server = autoClose(new RpcServer(emptyConfig));
  Rpc[] rpcs = createRpcConnection(server);
  Rpc serverRpc = rpcs[0];
  Rpc client = rpcs[1];
  TestMessage outbound = new TestMessage("Hello World!");
  Future call = client.call(outbound, TestMessage.class);
  TestMessage reply = call.get(10, TimeUnit.SECONDS);
  assertEquals(outbound.message, reply.message);
  TestMessage another = new TestMessage("Hello again!");
  Future anotherCall = client.call(another, TestMessage.class);
  TestMessage anotherReply = anotherCall.get(10, TimeUnit.SECONDS);
  assertEquals(another.message, anotherReply.message);
  String errorMsg = "This is an error.";
  try {
    client.call(new ErrorCall(errorMsg)).get(10, TimeUnit.SECONDS);
    // BUG FIX: without this fail(), a non-throwing error call passed silently.
    fail("Expected an ExecutionException from the error call.");
  }
  catch ( ExecutionException ee) {
    assertTrue(ee.getCause() instanceof RpcException);
    assertTrue(ee.getCause().getMessage().indexOf(errorMsg) >= 0);
  }
  // The server side can initiate calls over the same connection.
  TestMessage serverMsg = new TestMessage("Hello from the server!");
  Future serverCall = serverRpc.call(serverMsg, TestMessage.class);
  TestMessage serverReply = serverCall.get(10, TimeUnit.SECONDS);
  assertEquals(serverMsg.message, serverReply.message);
}
InternalCallVerifier EqualityVerifier
/** Two embedded RPC endpoints exchange one message via manual transfer. */
@Test public void testRpcDispatcher() throws Exception {
  Rpc serverEnd = autoClose(Rpc.createEmbedded(new TestDispatcher()));
  Rpc clientEnd = autoClose(Rpc.createEmbedded(new TestDispatcher()));
  TestMessage request = new TestMessage("Hello World!");
  Future pending = clientEnd.call(request, TestMessage.class);
  LOG.debug("Transferring messages...");
  transfer(serverEnd, clientEnd);
  TestMessage response = (TestMessage) pending.get(10, TimeUnit.SECONDS);
  assertEquals(request.message, response.message);
}
EqualityVerifier
/** Close listeners must fire exactly once, even if close() is called twice. */
@Test public void testCloseListener() throws Exception {
  RpcServer server = autoClose(new RpcServer(emptyConfig));
  Rpc[] endpoints = createRpcConnection(server);
  Rpc client = endpoints[1];
  final AtomicInteger closures = new AtomicInteger();
  client.addListener(new Rpc.Listener(){
    @Override public void rpcClosed(Rpc rpc){
      closures.incrementAndGet();
    }
  });
  client.close();
  client.close(); // second close must not re-notify listeners
  assertEquals(1, closures.get());
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/** An RPC round trip still works when the SASL QOP requests encryption. */
@Test public void testEncryption() throws Exception {
  Map encryptedConf = ImmutableMap.builder()
      .putAll(emptyConfig)
      .put(RpcConfiguration.RPC_SASL_OPT_PREFIX + "qop", Rpc.SASL_AUTH_CONF)
      .build();
  RpcServer server = autoClose(new RpcServer(encryptedConf));
  Rpc[] endpoints = createRpcConnection(server, encryptedConf);
  Rpc client = endpoints[1];
  TestMessage request = new TestMessage("Hello World!");
  Future pending = client.call(request, TestMessage.class);
  TestMessage response = (TestMessage) pending.get(10, TimeUnit.SECONDS);
  assertEquals(request.message, response.message);
}
Class: org.apache.orc.impl.TestBitFieldReader APIUtilityVerifier IterativeVerifier BranchVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier
/**
 * Round-trips 16K 3-bit values through BitFieldWriter/BitFieldReader and
 * verifies every value is read back unchanged.
 */
@Test public void testBiggerItems() throws Exception {
  TestInStream.OutputCollector collect = new TestInStream.OutputCollector();
  final int COUNT = 16384;
  BitFieldWriter writer = new BitFieldWriter(new OutStream("test", 500, null, collect), 3);
  for (int i = 0; i < COUNT; ++i) {
    // First half is a simple ramp; second half changes every third value.
    int value = (i < COUNT / 2) ? (i & 7) : ((i / 3) & 7);
    writer.write(value);
  }
  writer.flush();
  ByteBuffer inBuf = ByteBuffer.allocate(collect.buffer.size());
  collect.buffer.setByteBuffer(inBuf, 0, collect.buffer.size());
  inBuf.flip();
  BitFieldReader reader = new BitFieldReader(
      InStream.create("test", new ByteBuffer[]{inBuf}, new long[]{0},
          inBuf.remaining(), null, 500), 3);
  for (int i = 0; i < COUNT; ++i) {
    int expected = (i < COUNT / 2) ? (i & 7) : ((i / 3) & 7);
    assertEquals(expected, reader.next());
  }
}
APIUtilityVerifier IterativeVerifier BranchVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier
/**
 * Writes 16K 1-bit values, then reads every fifth value using skip() and
 * verifies the values landed on, including zero-length skips.
 */
@Test public void testSkips() throws Exception {
  TestInStream.OutputCollector collect = new TestInStream.OutputCollector();
  BitFieldWriter writer = new BitFieldWriter(new OutStream("test", 100, null, collect), 1);
  final int COUNT = 16384;
  for (int i = 0; i < COUNT; ++i) {
    int bit = (i < COUNT / 2) ? (i & 1) : ((i / 3) & 1);
    writer.write(bit);
  }
  writer.flush();
  ByteBuffer inBuf = ByteBuffer.allocate(collect.buffer.size());
  collect.buffer.setByteBuffer(inBuf, 0, collect.buffer.size());
  inBuf.flip();
  BitFieldReader reader = new BitFieldReader(
      InStream.create("test", new ByteBuffer[]{inBuf}, new long[]{0},
          inBuf.remaining(), null, 100), 1);
  for (int i = 0; i < COUNT; i += 5) {
    int actual = (int) reader.next();
    int expected = (i < COUNT / 2) ? (i & 1) : ((i / 3) & 1);
    assertEquals(expected, actual);
    if (i < COUNT - 5) {
      reader.skip(4);
    }
    reader.skip(0); // zero-length skip must be a no-op
  }
}
Class: org.apache.orc.impl.TestDynamicArray InternalCallVerifier EqualityVerifier
/**
 * Exercises DynamicByteArray: add/set grow the array, toString/size report
 * contents, and compare() performs lexicographic comparison between an
 * external byte range and a stored range.
 */
@Test public void testByteArray() throws Exception {
DynamicByteArray dba=new DynamicByteArray(3,10);
dba.add((byte)0);
dba.add((byte)1);
// set() past the current end implicitly extends the array.
dba.set(3,(byte)3);
dba.set(2,(byte)2);
dba.add((byte)4);
assertEquals("{0,1,2,3,4}",dba.toString());
assertEquals(5,dba.size());
byte[] val;
// Empty external range: equal to an empty stored range, less than length 1.
val=new byte[0];
assertEquals(0,dba.compare(val,0,0,2,0));
assertEquals(-1,dba.compare(val,0,0,2,1));
// Signature appears to be compare(bytes, offset, length, storedOffset, storedLength).
val=new byte[]{3,42};
assertEquals(1,dba.compare(val,0,1,2,0));
assertEquals(1,dba.compare(val,0,1,2,1));
assertEquals(0,dba.compare(val,0,1,3,1));
assertEquals(-1,dba.compare(val,0,1,3,2));
assertEquals(1,dba.compare(val,0,2,3,1));
// Append all 256 byte values (-128..127) and compare the whole range.
val=new byte[256];
for (int b=-128; b < 128; ++b) {
dba.add((byte)b);
val[b + 128]=(byte)b;
}
assertEquals(0,dba.compare(val,0,256,5,256));
// NOTE(review): these results imply bytes compare as unsigned
// (val[0] == -128 compares greater than 0) — confirm in DynamicByteArray.
assertEquals(1,dba.compare(val,0,1,0,1));
assertEquals(1,dba.compare(val,254,1,0,1));
assertEquals(1,dba.compare(val,120,1,64,1));
val=new byte[1024];
Random rand=new Random(1701);
// NOTE(review): nextBytes fills the whole array each pass; looping 1024
// times is redundant — only the final fill matters.
for (int i=0; i < val.length; ++i) {
rand.nextBytes(val);
}
dba.add(val,0,1024);
assertEquals(1285,dba.size());
assertEquals(0,dba.compare(val,0,1024,261,1024));
}
IterativeVerifier InternalCallVerifier EqualityVerifier
/** Exercises DynamicIntArray: bulk add/get, clear, toString and increment. */
@Test public void testIntArray() throws Exception {
  DynamicIntArray ints = new DynamicIntArray(10);
  final int N = 10000;
  for (int i = 0; i < N; ++i) {
    ints.add(2 * i);
  }
  assertEquals(N, ints.size());
  for (int i = 0; i < N; ++i) {
    assertEquals(2 * i, ints.get(i));
  }
  ints.clear();
  assertEquals(0, ints.size());
  ints.add(3);
  ints.add(12);
  ints.add(65);
  assertEquals("{3,12,65}", ints.toString());
  // increment() past the current size extends the array before adding.
  for (int i = 0; i < 5; ++i) {
    ints.increment(i, 3);
  }
  assertEquals("{6,15,68,3,3}", ints.toString());
}
Class: org.apache.orc.impl.TestInStream APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier
/**
 * Round-trips 1024 bytes through an uncompressed OutStream/InStream pair,
 * recording a stream position before each byte, then re-reads every byte by
 * seeking to its recorded position in reverse order.
 */
@Test public void testUncompressed() throws Exception {
OutputCollector collect=new OutputCollector();
// 100-byte buffer, null codec => uncompressed stream.
OutStream out=new OutStream("test",100,null,collect);
PositionCollector[] positions=new PositionCollector[1024];
for (int i=0; i < 1024; ++i) {
positions[i]=new PositionCollector();
out.getPosition(positions[i]);
out.write(i);
}
out.flush();
// Uncompressed output is exactly one byte per write.
assertEquals(1024,collect.buffer.size());
for (int i=0; i < 1024; ++i) {
assertEquals((byte)i,collect.buffer.get(i));
}
ByteBuffer inBuf=ByteBuffer.allocate(collect.buffer.size());
collect.buffer.setByteBuffer(inBuf,0,collect.buffer.size());
inBuf.flip();
InStream in=InStream.create("test",new ByteBuffer[]{inBuf},new long[]{0},inBuf.remaining(),null,100);
assertEquals("uncompressed stream test position: 0 length: 1024" + " range: 0 offset: 0 limit: 0",in.toString());
// Sequential read; write(int) stored only the low byte, hence the mask.
for (int i=0; i < 1024; ++i) {
int x=in.read();
assertEquals(i & 0xff,x);
}
// Random access via the recorded positions, walking backwards.
for (int i=1023; i >= 0; --i) {
in.seek(positions[i]);
assertEquals(i & 0xff,in.read());
}
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier
/**
 * Same round trip as testUncompressed but through a zlib-compressed stream,
 * also checking the expected compressed size and toString() formats.
 */
@Test public void testCompressed() throws Exception {
OutputCollector collect=new OutputCollector();
CompressionCodec codec=new ZlibCodec();
OutStream out=new OutStream("test",300,codec,collect);
PositionCollector[] positions=new PositionCollector[1024];
for (int i=0; i < 1024; ++i) {
positions[i]=new PositionCollector();
out.getPosition(positions[i]);
out.write(i);
}
out.flush();
assertEquals("test",out.toString());
// Observed zlib output size for this data at these settings.
assertEquals(961,collect.buffer.size());
ByteBuffer inBuf=ByteBuffer.allocate(collect.buffer.size());
collect.buffer.setByteBuffer(inBuf,0,collect.buffer.size());
inBuf.flip();
InStream in=InStream.create("test",new ByteBuffer[]{inBuf},new long[]{0},inBuf.remaining(),codec,300);
assertEquals("compressed stream test position: 0 length: 961 range: 0" + " offset: 0 limit: 0 range 0 = 0 to 961",in.toString());
for (int i=0; i < 1024; ++i) {
int x=in.read();
assertEquals(i & 0xff,x);
}
assertEquals(0,in.available());
// Seeking must still work after the stream has been fully consumed.
for (int i=1023; i >= 0; --i) {
in.seek(positions[i]);
assertEquals(i & 0xff,in.read());
}
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Writes 1024 ints to an uncompressed stream, then reads them back through an
 * InStream assembled from three disjoint ByteBuffers; also verifies seeking
 * works when only a subset of the buffers is supplied.
 */
@Test public void testUncompressedDisjointBuffers() throws Exception {
OutputCollector collect=new OutputCollector();
OutStream out=new OutStream("test",400,null,collect);
PositionCollector[] positions=new PositionCollector[1024];
DataOutput stream=new DataOutputStream(out);
for (int i=0; i < 1024; ++i) {
positions[i]=new PositionCollector();
out.getPosition(positions[i]);
stream.writeInt(i);
}
out.flush();
assertEquals("test",out.toString());
// 1024 ints * 4 bytes each.
assertEquals(4096,collect.buffer.size());
// Split the 4096 bytes across three buffers: 1024 + 2048 + 1024.
ByteBuffer[] inBuf=new ByteBuffer[3];
inBuf[0]=ByteBuffer.allocate(1100);
inBuf[1]=ByteBuffer.allocate(2200);
inBuf[2]=ByteBuffer.allocate(1100);
collect.buffer.setByteBuffer(inBuf[0],0,1024);
collect.buffer.setByteBuffer(inBuf[1],1024,2048);
collect.buffer.setByteBuffer(inBuf[2],3072,1024);
for (int i=0; i < inBuf.length; ++i) {
inBuf[i].flip();
}
InStream in=InStream.create("test",inBuf,new long[]{0,1024,3072},4096,null,400);
assertEquals("uncompressed stream test position: 0 length: 4096" + " range: 0 offset: 0 limit: 0",in.toString());
DataInputStream inStream=new DataInputStream(in);
for (int i=0; i < 1024; ++i) {
int x=inStream.readInt();
assertEquals(i,x);
}
assertEquals(0,in.available());
// Seek to every recorded position in reverse order.
for (int i=1023; i >= 0; --i) {
in.seek(positions[i]);
assertEquals(i,inStream.readInt());
}
// Only the last two buffers: stream data from byte 1024 on (ints 256+).
in=InStream.create("test",new ByteBuffer[]{inBuf[1],inBuf[2]},new long[]{1024,3072},4096,null,400);
inStream=new DataInputStream(in);
positions[256].reset();
in.seek(positions[256]);
for (int i=256; i < 1024; ++i) {
assertEquals(i,inStream.readInt());
}
// First and last buffers only: read the head, then seek across the gap.
in=InStream.create("test",new ByteBuffer[]{inBuf[0],inBuf[2]},new long[]{0,3072},4096,null,400);
inStream=new DataInputStream(in);
positions[768].reset();
for (int i=0; i < 256; ++i) {
assertEquals(i,inStream.readInt());
}
in.seek(positions[768]);
for (int i=768; i < 1024; ++i) {
assertEquals(i,inStream.readInt());
}
}
APIUtilityVerifier IterativeVerifier InternalCallVerifier EqualityVerifier
/**
 * Compressed variant of the disjoint-buffer test: 1024 ints written through
 * zlib, read back via an InStream built from three disjoint buffers, with
 * seeks against partial buffer subsets.
 */
@Test public void testDisjointBuffers() throws Exception {
OutputCollector collect=new OutputCollector();
CompressionCodec codec=new ZlibCodec();
OutStream out=new OutStream("test",400,codec,collect);
PositionCollector[] positions=new PositionCollector[1024];
DataOutput stream=new DataOutputStream(out);
for (int i=0; i < 1024; ++i) {
positions[i]=new PositionCollector();
out.getPosition(positions[i]);
stream.writeInt(i);
}
out.flush();
assertEquals("test",out.toString());
// Observed zlib output size for this data at these settings.
assertEquals(1674,collect.buffer.size());
// Split the compressed bytes unevenly across three buffers.
ByteBuffer[] inBuf=new ByteBuffer[3];
inBuf[0]=ByteBuffer.allocate(500);
inBuf[1]=ByteBuffer.allocate(1200);
inBuf[2]=ByteBuffer.allocate(500);
collect.buffer.setByteBuffer(inBuf[0],0,483);
collect.buffer.setByteBuffer(inBuf[1],483,1625 - 483);
collect.buffer.setByteBuffer(inBuf[2],1625,1674 - 1625);
for (int i=0; i < inBuf.length; ++i) {
inBuf[i].flip();
}
InStream in=InStream.create("test",inBuf,new long[]{0,483,1625},1674,codec,400);
assertEquals("compressed stream test position: 0 length: 1674 range: 0" + " offset: 0 limit: 0 range 0 = 0 to 483;" + " range 1 = 483 to 1142; range 2 = 1625 to 49",in.toString());
DataInputStream inStream=new DataInputStream(in);
for (int i=0; i < 1024; ++i) {
int x=inStream.readInt();
assertEquals(i,x);
}
assertEquals(0,in.available());
// Seek to every recorded position in reverse order.
for (int i=1023; i >= 0; --i) {
in.seek(positions[i]);
assertEquals(i,inStream.readInt());
}
// Only the last two buffers: seek to a value stored past the first buffer.
in=InStream.create("test",new ByteBuffer[]{inBuf[1],inBuf[2]},new long[]{483,1625},1674,codec,400);
inStream=new DataInputStream(in);
positions[303].reset();
in.seek(positions[303]);
for (int i=303; i < 1024; ++i) {
assertEquals(i,inStream.readInt());
}
// First and last buffers only: read the head, then seek across the gap.
in=InStream.create("test",new ByteBuffer[]{inBuf[0],inBuf[2]},new long[]{0,1625},1674,codec,400);
inStream=new DataInputStream(in);
positions[1001].reset();
for (int i=0; i < 300; ++i) {
assertEquals(i,inStream.readInt());
}
in.seek(positions[1001]);
for (int i=1001; i < 1024; ++i) {
assertEquals(i,inStream.readInt());
}
}
Class: org.apache.orc.impl.TestIntegerCompressionReader APIUtilityVerifier IterativeVerifier BranchVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier
/**
 * Writes 2048 signed integers with RLE v2, then reads every tenth value via
 * skip() and checks it, including zero-length skips.
 */
@Test public void testSkips() throws Exception {
  TestInStream.OutputCollector collect = new TestInStream.OutputCollector();
  RunLengthIntegerWriterV2 writer =
      new RunLengthIntegerWriterV2(new OutStream("test", 100, null, collect), true);
  for (int i = 0; i < 2048; ++i) {
    // Ramp for the first half, larger strided values for the second.
    writer.write((i < 1024) ? i : 256 * i);
  }
  writer.flush();
  ByteBuffer inBuf = ByteBuffer.allocate(collect.buffer.size());
  collect.buffer.setByteBuffer(inBuf, 0, collect.buffer.size());
  inBuf.flip();
  RunLengthIntegerReaderV2 reader = new RunLengthIntegerReaderV2(
      InStream.create("test", new ByteBuffer[]{inBuf}, new long[]{0},
          inBuf.remaining(), null, 100), true, false);
  for (int i = 0; i < 2048; i += 10) {
    int actual = (int) reader.next();
    assertEquals((i < 1024) ? i : 256 * i, actual);
    if (i < 2038) {
      reader.skip(9);
    }
    reader.skip(0); // zero-length skip must be a no-op
  }
}
Class: org.apache.orc.impl.TestMemoryManager InternalCallVerifier EqualityVerifier
/**
 * Verifies MemoryManager's allocation scale: it stays 1.0 while the total
 * writer allocation fits the pool and shrinks (pool / requested) once the
 * writers oversubscribe it; removing writers raises it again.
 */
@Test public void testBasics() throws Exception {
Configuration conf=new Configuration();
MemoryManager mgr=new MemoryManager(conf);
NullCallback callback=new NullCallback();
long poolSize=mgr.getTotalMemoryPool();
// The pool defaults to 50% of the JVM max heap.
assertEquals(Math.round(ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getMax() * 0.5d),poolSize);
assertEquals(1.0,mgr.getAllocationScale(),0.00001);
mgr.addWriter(new Path("p1"),1000,callback);
assertEquals(1.0,mgr.getAllocationScale(),0.00001);
// Re-adding the same path replaces its allocation rather than adding to it.
mgr.addWriter(new Path("p1"),poolSize / 2,callback);
assertEquals(1.0,mgr.getAllocationScale(),0.00001);
mgr.addWriter(new Path("p2"),poolSize / 2,callback);
assertEquals(1.0,mgr.getAllocationScale(),0.00001);
// Three writers want 3*pool/2 => scale 2/3.
mgr.addWriter(new Path("p3"),poolSize / 2,callback);
assertEquals(0.6666667,mgr.getAllocationScale(),0.00001);
// Four writers want 2*pool => scale 1/2.
mgr.addWriter(new Path("p4"),poolSize / 2,callback);
assertEquals(0.5,mgr.getAllocationScale(),0.000001);
// p4 replaced with 3*pool/2 => total 3*pool => scale 1/3.
mgr.addWriter(new Path("p4"),3 * poolSize / 2,callback);
assertEquals(0.3333333,mgr.getAllocationScale(),0.000001);
// Removing writers frees their allocation and raises the scale.
mgr.removeWriter(new Path("p1"));
mgr.removeWriter(new Path("p2"));
assertEquals(0.5,mgr.getAllocationScale(),0.00001);
mgr.removeWriter(new Path("p4"));
assertEquals(1.0,mgr.getAllocationScale(),0.00001);
}
Class: org.apache.orc.impl.TestRunLengthByteReader APIUtilityVerifier IterativeVerifier BranchVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier
/**
 * Writes 2048 bytes with run-length encoding, then reads every tenth byte
 * using skip() and checks it, including zero-length skips.
 */
@Test public void testSkips() throws Exception {
  TestInStream.OutputCollector collect = new TestInStream.OutputCollector();
  RunLengthByteWriter writer =
      new RunLengthByteWriter(new OutStream("test", 100, null, collect));
  for (int i = 0; i < 2048; ++i) {
    // Long runs in the first half, a ramp in the second.
    writer.write((i < 1024) ? (byte) (i / 16) : (byte) i);
  }
  writer.flush();
  ByteBuffer inBuf = ByteBuffer.allocate(collect.buffer.size());
  collect.buffer.setByteBuffer(inBuf, 0, collect.buffer.size());
  inBuf.flip();
  RunLengthByteReader reader = new RunLengthByteReader(
      InStream.create("test", new ByteBuffer[]{inBuf}, new long[]{0},
          inBuf.remaining(), null, 100));
  for (int i = 0; i < 2048; i += 10) {
    int actual = reader.next() & 0xff;
    int expected = (i < 1024) ? ((i / 16) & 0xff) : (i & 0xff);
    assertEquals(expected, actual);
    if (i < 2038) {
      reader.skip(9);
    }
    reader.skip(0); // zero-length skip must be a no-op
  }
}
APIUtilityVerifier IterativeVerifier BranchVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier
/**
 * Round-trips 2048 run-length-encoded bytes over an uncompressed stream,
 * then seeks back to every recorded position in reverse order and re-reads.
 */
@Test public void testUncompressedSeek() throws Exception {
  TestInStream.OutputCollector collect = new TestInStream.OutputCollector();
  RunLengthByteWriter writer =
      new RunLengthByteWriter(new OutStream("test", 100, null, collect));
  TestInStream.PositionCollector[] positions = new TestInStream.PositionCollector[2048];
  for (int i = 0; i < 2048; ++i) {
    positions[i] = new TestInStream.PositionCollector();
    writer.getPosition(positions[i]);
    writer.write((i < 1024) ? (byte) (i / 4) : (byte) i);
  }
  writer.flush();
  ByteBuffer inBuf = ByteBuffer.allocate(collect.buffer.size());
  collect.buffer.setByteBuffer(inBuf, 0, collect.buffer.size());
  inBuf.flip();
  RunLengthByteReader reader = new RunLengthByteReader(
      InStream.create("test", new ByteBuffer[]{inBuf}, new long[]{0},
          inBuf.remaining(), null, 100));
  // Sequential read of every value.
  for (int i = 0; i < 2048; ++i) {
    int expected = (i < 1024) ? ((i / 4) & 0xff) : (i & 0xff);
    assertEquals(expected, reader.next() & 0xff);
  }
  // Random access via recorded positions, walking backwards.
  for (int i = 2047; i >= 0; --i) {
    reader.seek(positions[i]);
    int expected = (i < 1024) ? ((i / 4) & 0xff) : (i & 0xff);
    assertEquals(expected, reader.next() & 0xff);
  }
}
APIUtilityVerifier IterativeVerifier BranchVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier
/**
 * Same as the uncompressed seek test, but through a Snappy-compressed
 * stream with a 500-byte buffer.
 */
@Test public void testCompressedSeek() throws Exception {
  CompressionCodec codec = new SnappyCodec();
  TestInStream.OutputCollector collect = new TestInStream.OutputCollector();
  RunLengthByteWriter writer =
      new RunLengthByteWriter(new OutStream("test", 500, codec, collect));
  TestInStream.PositionCollector[] positions = new TestInStream.PositionCollector[2048];
  for (int i = 0; i < 2048; ++i) {
    positions[i] = new TestInStream.PositionCollector();
    writer.getPosition(positions[i]);
    writer.write((i < 1024) ? (byte) (i / 4) : (byte) i);
  }
  writer.flush();
  ByteBuffer inBuf = ByteBuffer.allocate(collect.buffer.size());
  collect.buffer.setByteBuffer(inBuf, 0, collect.buffer.size());
  inBuf.flip();
  RunLengthByteReader reader = new RunLengthByteReader(
      InStream.create("test", new ByteBuffer[]{inBuf}, new long[]{0},
          inBuf.remaining(), codec, 500));
  // Sequential read of every value.
  for (int i = 0; i < 2048; ++i) {
    int expected = (i < 1024) ? ((i / 4) & 0xff) : (i & 0xff);
    assertEquals(expected, reader.next() & 0xff);
  }
  // Random access via recorded positions, walking backwards.
  for (int i = 2047; i >= 0; --i) {
    reader.seek(positions[i]);
    int expected = (i < 1024) ? ((i / 4) & 0xff) : (i & 0xff);
    assertEquals(expected, reader.next() & 0xff);
  }
}
Class: org.apache.orc.impl.TestRunLengthIntegerReader APIUtilityVerifier IterativeVerifier BranchVerifier InternalCallVerifier EqualityVerifier PublicFieldVerifier
/**
 * Writes 2048 signed longs with RLE v1, then reads every tenth value via
 * skip() and checks it, including zero-length skips.
 */
@Test public void testSkips() throws Exception {
  TestInStream.OutputCollector collect = new TestInStream.OutputCollector();
  RunLengthIntegerWriter writer =
      new RunLengthIntegerWriter(new OutStream("test", 100, null, collect), true);
  for (int i = 0; i < 2048; ++i) {
    writer.write((i < 1024) ? i : 256 * i);
  }
  writer.flush();
  ByteBuffer inBuf = ByteBuffer.allocate(collect.buffer.size());
  collect.buffer.setByteBuffer(inBuf, 0, collect.buffer.size());
  inBuf.flip();
  RunLengthIntegerReader reader = new RunLengthIntegerReader(
      InStream.create("test", new ByteBuffer[]{inBuf}, new long[]{0},
          inBuf.remaining(), null, 100), true);
  for (int i = 0; i < 2048; i += 10) {
    int actual = (int) reader.next();
    assertEquals((i < 1024) ? i : 256 * i, actual);
    if (i < 2038) {
      reader.skip(9);
    }
    reader.skip(0); // zero-length skip must be a no-op
  }
}
Class: org.apache.orc.impl.TestSerializationUtils UtilityVerifier EqualityVerifier HybridVerifier
/**
 * Documents Guava LongMath.checkedSubtract behavior at the long range
 * boundaries: overflowing subtractions throw ArithmeticException("overflow"),
 * while boundary-but-representable results succeed.
 */
@Test public void testSubtractionOverflowGuava(){
  try {
    LongMath.checkedSubtract(22222222222L, Long.MIN_VALUE);
    fail("expected ArithmeticException for overflow");
  }
  catch ( ArithmeticException ex) {
    // FIX: JUnit assertEquals takes (expected, actual) — arguments were swapped.
    assertEquals("overflow", ex.getMessage());
  }
  try {
    LongMath.checkedSubtract(-22222222222L, Long.MAX_VALUE);
    fail("expected ArithmeticException for overflow");
  }
  catch ( ArithmeticException ex) {
    assertEquals("overflow", ex.getMessage());
  }
  try {
    LongMath.checkedSubtract(Long.MIN_VALUE, Long.MAX_VALUE);
    fail("expected ArithmeticException for overflow");
  }
  catch ( ArithmeticException ex) {
    assertEquals("overflow", ex.getMessage());
  }
  // Large but representable results succeed.
  assertEquals(-8106206116692740190L, LongMath.checkedSubtract(-1553103058346370095L, 6553103058346370095L));
  assertEquals(-Long.MAX_VALUE, LongMath.checkedSubtract(0, Long.MAX_VALUE));
  assertEquals(Long.MIN_VALUE, LongMath.checkedSubtract(Long.MIN_VALUE, 0));
}
APIUtilityVerifier InternalCallVerifier EqualityVerifier
/** Round-trips double values through SerializationUtils write/read. */
@Test public void testDoubles() throws Exception {
  final double tolerance = 0.0000000000000001;
  SerializationUtils utils = new SerializationUtils();
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  utils.writeDouble(buffer, 1343822337.759);
  assertEquals(1343822337.759, utils.readDouble(fromBuffer(buffer)), tolerance);
  // Fresh buffer for a value that is not exactly representable in binary.
  buffer = new ByteArrayOutputStream();
  utils.writeDouble(buffer, 0.8);
  double roundTripped = utils.readDouble(fromBuffer(buffer));
  assertEquals(0.8, roundTripped, tolerance);
}
InternalCallVerifier EqualityVerifier
/**
 * Checks SerializationUtils.isSafeSubtract on overflowing and boundary
 * subtractions of longs.
 */
@Test public void testSubtractionOverflow(){
  SerializationUtils utils = new SerializationUtils();
  // Idiom fix: assertFalse/assertTrue instead of assertEquals(boolean, ...).
  assertFalse(utils.isSafeSubtract(22222222222L, Long.MIN_VALUE));
  assertFalse(utils.isSafeSubtract(-22222222222L, Long.MAX_VALUE));
  assertFalse(utils.isSafeSubtract(Long.MIN_VALUE, Long.MAX_VALUE));
  assertTrue(utils.isSafeSubtract(-1553103058346370095L, 6553103058346370095L));
  assertTrue(utils.isSafeSubtract(0, Long.MAX_VALUE));
  assertTrue(utils.isSafeSubtract(Long.MIN_VALUE, 0));
}
IterativeVerifier EqualityVerifier
/**
 * Verifies the variable-length BigInteger serialization: the sign is folded
 * into the low bit (zigzag-style: n -> 2n for n >= 0, -n -> 2n-1), so small
 * magnitudes take one byte and arbitrary-precision values round-trip.
 */
@Test public void testBigIntegers() throws Exception {
ByteArrayOutputStream buffer=new ByteArrayOutputStream();
// Single-byte encodings: 0 -> 0, 1 -> 2, -1 -> 1, 50 -> 100, -50 -> 99.
SerializationUtils.writeBigInteger(buffer,BigInteger.valueOf(0));
assertArrayEquals(new byte[]{0},buffer.toByteArray());
assertEquals(0L,SerializationUtils.readBigInteger(fromBuffer(buffer)).longValue());
buffer.reset();
SerializationUtils.writeBigInteger(buffer,BigInteger.valueOf(1));
assertArrayEquals(new byte[]{2},buffer.toByteArray());
assertEquals(1L,SerializationUtils.readBigInteger(fromBuffer(buffer)).longValue());
buffer.reset();
SerializationUtils.writeBigInteger(buffer,BigInteger.valueOf(-1));
assertArrayEquals(new byte[]{1},buffer.toByteArray());
assertEquals(-1L,SerializationUtils.readBigInteger(fromBuffer(buffer)).longValue());
buffer.reset();
SerializationUtils.writeBigInteger(buffer,BigInteger.valueOf(50));
assertArrayEquals(new byte[]{100},buffer.toByteArray());
assertEquals(50L,SerializationUtils.readBigInteger(fromBuffer(buffer)).longValue());
buffer.reset();
SerializationUtils.writeBigInteger(buffer,BigInteger.valueOf(-50));
assertArrayEquals(new byte[]{99},buffer.toByteArray());
assertEquals(-50L,SerializationUtils.readBigInteger(fromBuffer(buffer)).longValue());
// Exhaustive check of the one-byte/two-byte boundary ([-64, 64)).
for (int i=-8192; i < 8192; ++i) {
buffer.reset();
SerializationUtils.writeBigInteger(buffer,BigInteger.valueOf(i));
assertEquals("compare length for " + i,i >= -64 && i < 64 ? 1 : 2,buffer.size());
assertEquals("compare result for " + i,i,SerializationUtils.readBigInteger(fromBuffer(buffer)).intValue());
}
// Round-trip values wider than a long.
buffer.reset();
SerializationUtils.writeBigInteger(buffer,new BigInteger("123456789abcdef0",16));
assertEquals(new BigInteger("123456789abcdef0",16),SerializationUtils.readBigInteger(fromBuffer(buffer)));
buffer.reset();
SerializationUtils.writeBigInteger(buffer,new BigInteger("-123456789abcdef0",16));
assertEquals(new BigInteger("-123456789abcdef0",16),SerializationUtils.readBigInteger(fromBuffer(buffer)));
// Build a 512-hex-digit value covering every byte pattern 00..ff.
StringBuilder buf=new StringBuilder();
for (int i=0; i < 256; ++i) {
String num=Integer.toHexString(i);
if (num.length() == 1) {
buf.append('0');
}
buf.append(num);
}
buffer.reset();
SerializationUtils.writeBigInteger(buffer,new BigInteger(buf.toString(),16));
assertEquals(new BigInteger(buf.toString(),16),SerializationUtils.readBigInteger(fromBuffer(buffer)));
// A value with a long run of zero bytes in the middle.
buffer.reset();
SerializationUtils.writeBigInteger(buffer,new BigInteger("ff000000000000000000000000000000000000000000ff",16));
assertEquals(new BigInteger("ff000000000000000000000000000000000000000000ff",16),SerializationUtils.readBigInteger(fromBuffer(buffer)));
}
Class: org.apache.orc.impl.TestStringRedBlackTree EqualityVerifier
/** Inserting a..z in sorted order assigns ids 0..25 in the same order. */
@Test public void test2() throws Exception {
  StringRedBlackTree tree=buildTree("a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y","z");
  assertEquals(26, tree.size());
  int[] expectedIds = new int[26];
  for (int i = 0; i < 26; ++i) {
    expectedIds[i] = i; // id order matches insertion order here
  }
  checkContents(tree, expectedIds, "a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y","z");
}
InternalCallVerifier EqualityVerifier
/**
 * Exercises StringRedBlackTree end to end: add() returns a stable id per
 * distinct string (re-adding returns the existing id), size/byte/character
 * accounting, sorted contents via checkContents, and clear().
 */
@Test public void test1() throws Exception {
StringRedBlackTree tree=new StringRedBlackTree(5);
assertEquals(0,tree.getSizeInBytes());
checkTree(tree);
// Ids are assigned in insertion order of distinct strings.
assertEquals(0,tree.add("owen"));
checkTree(tree);
assertEquals(1,tree.add("ashutosh"));
checkTree(tree);
// Re-adding an existing string returns its original id.
assertEquals(0,tree.add("owen"));
checkTree(tree);
assertEquals(2,tree.add("alan"));
checkTree(tree);
assertEquals(2,tree.add("alan"));
checkTree(tree);
assertEquals(1,tree.add("ashutosh"));
checkTree(tree);
assertEquals(3,tree.add("greg"));
checkTree(tree);
assertEquals(4,tree.add("eric"));
checkTree(tree);
assertEquals(5,tree.add("arun"));
checkTree(tree);
// size() counts distinct entries only.
assertEquals(6,tree.size());
checkTree(tree);
assertEquals(6,tree.add("eric14"));
checkTree(tree);
assertEquals(7,tree.add("o"));
checkTree(tree);
assertEquals(8,tree.add("ziggy"));
checkTree(tree);
assertEquals(9,tree.add("z"));
checkTree(tree);
// The int[] gives each string's id in sorted-string order.
checkContents(tree,new int[]{2,5,1,4,6,3,7,0,9,8},"alan","arun","ashutosh","eric","eric14","greg","o","owen","z","ziggy");
assertEquals(32888,tree.getSizeInBytes());
assertEquals(3,tree.add("greg"));
// Total characters across the 10 distinct strings.
assertEquals(41,tree.getCharacterSize());
assertEquals(10,tree.add("zak"));
checkTree(tree);
assertEquals(11,tree.add("eric1"));
checkTree(tree);
assertEquals(12,tree.add("ash"));
checkTree(tree);
assertEquals(13,tree.add("harry"));
checkTree(tree);
assertEquals(14,tree.add("john"));
checkTree(tree);
// clear() resets both byte and character accounting.
tree.clear();
checkTree(tree);
assertEquals(0,tree.getSizeInBytes());
assertEquals(0,tree.getCharacterSize());
}
EqualityVerifier
/** Inserting z..a in reverse order assigns ids 25..0 when walked sorted. */
@Test public void test3() throws Exception {
  StringRedBlackTree tree=buildTree("z","y","x","w","v","u","t","s","r","q","p","o","n","m","l","k","j","i","h","g","f","e","d","c","b","a");
  assertEquals(26, tree.size());
  int[] expectedIds = new int[26];
  for (int i = 0; i < 26; ++i) {
    expectedIds[i] = 25 - i; // "a" was inserted last, so it has the highest id
  }
  checkContents(tree, expectedIds, "a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y","z");
}
Class: org.apache.orc.impl.TestZlib EqualityVerifier
/**
 * ZlibCodec.compress() must return false (rather than overflowing the
 * destination) when the output buffer cannot hold the compressed data.
 */
@Test public void testNoOverflow() throws Exception {
  ByteBuffer in = ByteBuffer.allocate(10);
  ByteBuffer out = ByteBuffer.allocate(10);
  in.put(new byte[]{1, 2, 3, 4, 5, 6, 7, 10});
  in.flip();
  CompressionCodec codec = new ZlibCodec();
  // Idiom fix: assertFalse instead of assertEquals(false, ...).
  assertFalse(codec.compress(in, out, null));
}